From 2d8faa875c25c3173caac70596103af94a050dc6 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Wed, 8 May 2024 10:31:57 -0400 Subject: [PATCH 001/119] [ES|QL] Create MockBigArrays with CircuitBreaker in AbstractFunctionTestCases (#108195) * create mockbigarrays with circuit breaker --- .../compute/data/BytesRefBlockBuilder.java | 4 --- .../compute/data/X-BlockBuilder.java.st | 4 --- .../function/AbstractFunctionTestCase.java | 25 ++++++++++++++----- 3 files changed, 19 insertions(+), 14 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index 49075789ed4a..6232cbdd2717 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -21,10 +21,6 @@ final class BytesRefBlockBuilder extends AbstractBlockBuilder implements BytesRe private BytesRefArray values; - BytesRefBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE, blockFactory); - } - BytesRefBlockBuilder(int estimatedSize, BigArrays bigArrays, BlockFactory blockFactory) { super(blockFactory); values = new BytesRefArray(Math.max(estimatedSize, 2), bigArrays); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index 0d3d2293a1bb..8397a0f5274f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -31,10 +31,6 @@ final class 
$Type$BlockBuilder extends AbstractBlockBuilder implements $Type$Blo $if(BytesRef)$ private BytesRefArray values; - BytesRefBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - this(estimatedSize, BigArrays.NON_RECYCLING_INSTANCE, blockFactory); - } - BytesRefBlockBuilder(int estimatedSize, BigArrays bigArrays, BlockFactory blockFactory) { super(blockFactory); values = new BytesRefArray(Math.max(estimatedSize, 2), bigArrays); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 1a410c518e9b..1fd7cfe36806 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -307,7 +307,13 @@ private Object toJavaObjectUnsignedLongAware(Block block, int position) { *

*/ public final void testEvaluateBlockWithoutNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); + assumeTrue("no warning is expected", testCase.getExpectedWarnings() == null); + try { + testEvaluateBlock(driverContext().blockFactory(), driverContext(), false); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); + assertFalse("Test data is too large to fit in the memory", true); + } } /** @@ -315,7 +321,13 @@ public final void testEvaluateBlockWithoutNulls() { * some null values inserted between. */ public final void testEvaluateBlockWithNulls() { - testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); + assumeTrue("no warning is expected", testCase.getExpectedWarnings() == null); + try { + testEvaluateBlock(driverContext().blockFactory(), driverContext(), true); + } catch (CircuitBreakingException ex) { + assertThat(ex.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE)); + assertFalse("Test data is too large to fit in the memory", true); + } } /** @@ -1543,17 +1555,18 @@ private static void writeToTempDir(String subdir, String str, String extension) private final List breakers = Collections.synchronizedList(new ArrayList<>()); protected final DriverContext driverContext() { - MockBigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofMb(256)).withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); - return new DriverContext(bigArrays.withCircuitBreaking(), new BlockFactory(breaker, bigArrays)); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); } protected final DriverContext crankyContext() { - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new 
CrankyCircuitBreakerService()); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()) + .withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); - return new DriverContext(bigArrays.withCircuitBreaking(), new BlockFactory(breaker, bigArrays)); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); } @After From e2c19f2ac8127f12eba7d045064cb1d580d8e64e Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Wed, 8 May 2024 17:24:25 +0200 Subject: [PATCH 002/119] Fix semantic text for non snapshot tests (#108372) --- x-pack/plugin/inference/build.gradle | 8 ++++++++ x-pack/plugin/ml/build.gradle | 7 ------- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index 0aef8601ffcc..3e2171d0654d 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -4,6 +4,8 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ +import org.elasticsearch.gradle.internal.info.BuildParams + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' @@ -36,6 +38,12 @@ dependencies { api "com.ibm.icu:icu4j:${versions.icu4j}" } +if (BuildParams.isSnapshotBuild() == false) { + tasks.named("test").configure { + systemProperty 'es.semantic_text_feature_flag_enabled', 'true' + } +} + tasks.named('yamlRestTest') { usesDefaultDistribution() } diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 26f5ea053771..f42dcc6179d0 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -1,6 +1,5 @@ import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.dra.DraResolvePlugin -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' @@ -115,12 +114,6 @@ artifacts { archives tasks.named("jar") } -if (BuildParams.isSnapshotBuild() == false) { - tasks.named("test").configure { - systemProperty 'es.semantic_text_feature_flag_enabled', 'true' - } -} - tasks.register("extractNativeLicenses", Copy) { dependsOn configurations.nativeBundle into "${buildDir}/extractedNativeLicenses" From 2f94aeea0c8dfe1f448381171f80fa6ffbbcfaa5 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 8 May 2024 08:25:44 -0700 Subject: [PATCH 003/119] Refactor rolling upgrade tests to make it easier to customize (#108393) --- .../AbstractRollingUpgradeTestCase.java | 54 +++++++++++++++++++ .../upgrades/ClusterFeatureMigrationIT.java | 2 +- .../upgrades/DesiredNodesUpgradeIT.java | 2 +- .../elasticsearch/upgrades/DownsampleIT.java | 2 +- .../upgrades/FeatureUpgradeIT.java | 2 +- .../elasticsearch/upgrades/FieldCapsIT.java | 2 +- .../upgrades/HealthNodeUpgradeIT.java | 2 +- .../IgnoredMetaFieldRollingUpgradeIT.java | 2 +- 
.../elasticsearch/upgrades/IndexingIT.java | 2 +- .../ParameterizedRollingUpgradeTestCase.java | 49 ++++------------- .../upgrades/SnapshotBasedRecoveryIT.java | 2 +- .../upgrades/SystemIndicesUpgradeIT.java | 2 +- .../org/elasticsearch/upgrades/TsdbIT.java | 2 +- .../UpgradeWithOldIndexSettingsIT.java | 2 +- .../upgrades/VectorSearchIT.java | 2 +- .../org/elasticsearch/upgrades/XPackIT.java | 2 +- .../application/InferenceUpgradeTestCase.java | 4 +- 17 files changed, 80 insertions(+), 55 deletions(-) create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/AbstractRollingUpgradeTestCase.java diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/AbstractRollingUpgradeTestCase.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/AbstractRollingUpgradeTestCase.java new file mode 100644 index 000000000000..4837afbf6ccd --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/AbstractRollingUpgradeTestCase.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TemporaryFolder; +import org.junit.rules.TestRule; + +import java.util.function.Supplier; + +public abstract class AbstractRollingUpgradeTestCase extends ParameterizedRollingUpgradeTestCase { + + private static final TemporaryFolder repoDirectory = new TemporaryFolder(); + + private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .version(getOldClusterTestVersion()) + .nodes(NODE_NUM) + .setting("path.repo", new Supplier<>() { + @Override + @SuppressForbidden(reason = "TemporaryFolder only has io.File methods, not nio.File") + public String get() { + return repoDirectory.getRoot().getPath(); + } + }) + .setting("xpack.security.enabled", "false") + .feature(FeatureFlag.TIME_SERIES_MODE) + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); + + protected AbstractRollingUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + @Override + protected ElasticsearchCluster getUpgradeCluster() { + return cluster; + } +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java index 0487b282179a..73abb634dfd7 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java @@ -24,7 
+24,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; -public class ClusterFeatureMigrationIT extends ParameterizedRollingUpgradeTestCase { +public class ClusterFeatureMigrationIT extends AbstractRollingUpgradeTestCase { @Before public void checkMigrationVersion() { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java index 73d91ac41fcb..c7f99b3525f7 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DesiredNodesUpgradeIT.java @@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; -public class DesiredNodesUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class DesiredNodesUpgradeIT extends AbstractRollingUpgradeTestCase { private final int desiredNodesVersion; diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java index 757f793ac4c4..488cd966ed65 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/DownsampleIT.java @@ -25,7 +25,7 @@ import static org.hamcrest.Matchers.equalTo; -public class DownsampleIT extends ParameterizedRollingUpgradeTestCase { +public class DownsampleIT extends AbstractRollingUpgradeTestCase { private static final String FIXED_INTERVAL = "1h"; private String index; diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java index 4fe45c05b157..fc77eef0ae8b 100644 --- 
a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FeatureUpgradeIT.java @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -public class FeatureUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class FeatureUpgradeIT extends AbstractRollingUpgradeTestCase { public FeatureUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java index 860cd2c0e861..306447d8cc2c 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java @@ -40,7 +40,7 @@ * the co-ordinating node if older nodes were included in the system */ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103473") -public class FieldCapsIT extends ParameterizedRollingUpgradeTestCase { +public class FieldCapsIT extends AbstractRollingUpgradeTestCase { public FieldCapsIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java index 0f210ee4b245..6647cb413c9f 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/HealthNodeUpgradeIT.java @@ -20,7 +20,7 @@ import static org.hamcrest.CoreMatchers.equalTo; -public class HealthNodeUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class HealthNodeUpgradeIT extends AbstractRollingUpgradeTestCase { public 
HealthNodeUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java index 874fac615b9b..1477e2b63cf0 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IgnoredMetaFieldRollingUpgradeIT.java @@ -26,7 +26,7 @@ import java.util.Locale; import java.util.Map; -public class IgnoredMetaFieldRollingUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class IgnoredMetaFieldRollingUpgradeIT extends AbstractRollingUpgradeTestCase { private static final String TERMS_AGG_QUERY = Strings.format(""" { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java index 82485130f05c..157e2293b69a 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java @@ -51,7 +51,7 @@ * xpack rolling restart tests. We should work on a way to remove this * duplication but for now we have no real way to share code. 
*/ -public class IndexingIT extends ParameterizedRollingUpgradeTestCase { +public class IndexingIT extends AbstractRollingUpgradeTestCase { public IndexingIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java index 63ed54d05adf..d5f645c387d6 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java @@ -14,74 +14,45 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.cluster.ElasticsearchCluster; -import org.elasticsearch.test.cluster.FeatureFlag; -import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.Version; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.test.rest.TestFeatureService; import org.junit.AfterClass; import org.junit.Before; -import org.junit.ClassRule; -import org.junit.rules.RuleChain; -import org.junit.rules.TemporaryFolder; -import org.junit.rules.TestRule; import java.util.HashSet; import java.util.Map; import java.util.Set; -import java.util.function.Supplier; import java.util.stream.IntStream; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; public abstract class ParameterizedRollingUpgradeTestCase extends ESRestTestCase { + protected static final 
int NODE_NUM = 3; private static final String OLD_CLUSTER_VERSION = System.getProperty("tests.old_cluster_version"); - - private static final TemporaryFolder repoDirectory = new TemporaryFolder(); - - private static final int NODE_NUM = 3; - - private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() - .distribution(DistributionType.DEFAULT) - .version(getOldClusterTestVersion()) - .nodes(NODE_NUM) - .setting("path.repo", new Supplier<>() { - @Override - @SuppressForbidden(reason = "TemporaryFolder only has io.File methods, not nio.File") - public String get() { - return repoDirectory.getRoot().getPath(); - } - }) - .setting("xpack.security.enabled", "false") - .feature(FeatureFlag.TIME_SERIES_MODE) - .build(); - - @ClassRule - public static TestRule ruleChain = RuleChain.outerRule(repoDirectory).around(cluster); - - @ParametersFactory(shuffle = false) - public static Iterable parameters() { - return IntStream.rangeClosed(0, NODE_NUM).boxed().map(n -> new Object[] { n }).toList(); - } - private static final Set upgradedNodes = new HashSet<>(); private static TestFeatureService oldClusterTestFeatureService = null; private static boolean upgradeFailed = false; private static IndexVersion oldIndexVersion; - private final int requestedUpgradedNodes; protected ParameterizedRollingUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { this.requestedUpgradedNodes = upgradedNodes; } + @ParametersFactory(shuffle = false) + public static Iterable parameters() { + return IntStream.rangeClosed(0, NODE_NUM).boxed().map(n -> new Object[] { n }).toList(); + } + + protected abstract ElasticsearchCluster getUpgradeCluster(); + @Before public void extractOldClusterFeatures() { if (isOldCluster() && oldClusterTestFeatureService == null) { @@ -135,7 +106,7 @@ public void upgradeNode() throws Exception { if (upgradedNodes.add(n)) { try { logger.info("Upgrading node {} to version {}", n, Version.CURRENT); - cluster.upgradeNodeToVersion(n, Version.CURRENT); + 
getUpgradeCluster().upgradeNodeToVersion(n, Version.CURRENT); } catch (Exception e) { upgradeFailed = true; throw e; @@ -199,7 +170,7 @@ protected static boolean isUpgradedCluster() { @Override protected String getTestRestCluster() { - return cluster.getHttpAddresses(); + return getUpgradeCluster().getHttpAddresses(); } @Override diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java index ef80643c82c0..593630546845 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java @@ -42,7 +42,7 @@ import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.notNullValue; -public class SnapshotBasedRecoveryIT extends ParameterizedRollingUpgradeTestCase { +public class SnapshotBasedRecoveryIT extends AbstractRollingUpgradeTestCase { public SnapshotBasedRecoveryIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java index fbd6ee8aa375..a2e3b03c9036 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SystemIndicesUpgradeIT.java @@ -23,7 +23,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -public class SystemIndicesUpgradeIT extends ParameterizedRollingUpgradeTestCase { +public class SystemIndicesUpgradeIT extends AbstractRollingUpgradeTestCase { public SystemIndicesUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git 
a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java index 3ce0fc79087c..2889885f8398 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/TsdbIT.java @@ -26,7 +26,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -public class TsdbIT extends ParameterizedRollingUpgradeTestCase { +public class TsdbIT extends AbstractRollingUpgradeTestCase { public TsdbIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java index 3af344051030..8dc3b43abf3e 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/UpgradeWithOldIndexSettingsIT.java @@ -24,7 +24,7 @@ import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.hamcrest.Matchers.is; -public class UpgradeWithOldIndexSettingsIT extends ParameterizedRollingUpgradeTestCase { +public class UpgradeWithOldIndexSettingsIT extends AbstractRollingUpgradeTestCase { public UpgradeWithOldIndexSettingsIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java index e78e0978b1d8..21dbad9487d4 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java +++ 
b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/VectorSearchIT.java @@ -22,7 +22,7 @@ import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; -public class VectorSearchIT extends ParameterizedRollingUpgradeTestCase { +public class VectorSearchIT extends AbstractRollingUpgradeTestCase { public VectorSearchIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java index dade5b53adda..6379a8875dfb 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/XPackIT.java @@ -22,7 +22,7 @@ * Basic tests for simple xpack functionality that are only run if the * cluster is the on the default distribution. */ -public class XPackIT extends ParameterizedRollingUpgradeTestCase { +public class XPackIT extends AbstractRollingUpgradeTestCase { public XPackIT(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java index fe08db9b94b8..ecfec2304c8a 100644 --- a/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java +++ b/x-pack/plugin/inference/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/xpack/application/InferenceUpgradeTestCase.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.upgrades.ParameterizedRollingUpgradeTestCase; +import 
org.elasticsearch.upgrades.AbstractRollingUpgradeTestCase; import java.io.IOException; import java.util.List; @@ -21,7 +21,7 @@ import static org.elasticsearch.core.Strings.format; -public class InferenceUpgradeTestCase extends ParameterizedRollingUpgradeTestCase { +public class InferenceUpgradeTestCase extends AbstractRollingUpgradeTestCase { public InferenceUpgradeTestCase(@Name("upgradedNodes") int upgradedNodes) { super(upgradedNodes); From 616e71963e195ed3306fb2721c139f6477b33e8f Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Wed, 8 May 2024 18:02:02 +0200 Subject: [PATCH 004/119] [Inference API] Add Azure OpenAI completion support (#108352) --- .../org/elasticsearch/TransportVersions.java | 1 + .../org/elasticsearch/test/ESTestCase.java | 6 + .../azureopenai/AzureOpenAiActionCreator.java | 7 + .../azureopenai/AzureOpenAiActionVisitor.java | 3 + .../AzureOpenAiCompletionAction.java | 67 ++++++ .../AzureOpenAiCompletionRequestManager.java | 58 +++++ .../AzureOpenAiCompletionRequest.java | 70 ++++++ .../AzureOpenAiCompletionRequestEntity.java | 64 +++++ .../AzureOpenAiEmbeddingsRequest.java | 27 +-- .../azureopenai/AzureOpenAiRequest.java | 36 ++- .../request/azureopenai/AzureOpenAiUtils.java | 2 + .../external/response/XContentUtils.java | 2 +- .../AzureOpenAiCompletionResponseEntity.java | 114 +++++++++ .../azureopenai/AzureOpenAiModel.java | 41 ++++ .../AzureOpenAiSecretSettings.java | 31 ++- .../azureopenai/AzureOpenAiService.java | 29 ++- .../AzureOpenAiCompletionModel.java | 121 ++++++++++ ...reOpenAiCompletionRequestTaskSettings.java | 38 +++ .../AzureOpenAiCompletionServiceSettings.java | 183 ++++++++++++++ .../AzureOpenAiCompletionTaskSettings.java | 105 +++++++++ .../AzureOpenAiEmbeddingsModel.java | 37 +-- .../AzureOpenAiEmbeddingsServiceSettings.java | 2 +- .../AzureOpenAiActionCreatorTests.java | 223 ++++++++++++++++-- .../AzureOpenAiCompletionActionTests.java | 200 ++++++++++++++++ .../azureopenai/AzureOpenAiRequestTests.java | 62 +++++ 
...ureOpenAiCompletionRequestEntityTests.java | 45 ++++ .../AzureOpenAiCompletionRequestTests.java | 100 ++++++++ ...ureOpenAiEmbeddingsRequestEntityTests.java | 3 +- .../AzureOpenAiEmbeddingsRequestTests.java | 53 +++-- .../external/response/XContentUtilsTests.java | 18 ++ ...reOpenAiCompletionResponseEntityTests.java | 220 +++++++++++++++++ ...enAiChatCompletionResponseEntityTests.java | 6 +- .../AzureOpenAiCompletionModelTests.java | 142 +++++++++++ ...nAiCompletionRequestTaskSettingsTests.java | 45 ++++ ...eOpenAiCompletionServiceSettingsTests.java | 92 ++++++++ ...zureOpenAiCompletionTaskSettingsTests.java | 99 ++++++++ .../AzureOpenAiEmbeddingsModelTests.java | 30 +++ 37 files changed, 2283 insertions(+), 99 deletions(-) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java create mode 100644 
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/{ => embeddings}/AzureOpenAiEmbeddingsRequestEntityTests.java (96%) rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/{ => embeddings}/AzureOpenAiEmbeddingsRequestTests.java (73%) create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java create mode 100644 
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 1cc7e47cddda..db43a12cf901 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -195,6 +195,7 @@ static TransportVersion def(int id) { public static final TransportVersion INDEXING_PRESSURE_REQUEST_REJECTIONS_COUNT = def(8_652_00_0); public static final TransportVersion ROLLUP_USAGE = def(8_653_00_0); public static final TransportVersion SECURITY_ROLE_DESCRIPTION = def(8_654_00_0); + public static final TransportVersion ML_INFERENCE_AZURE_OPENAI_COMPLETIONS = def(8_655_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index bea222a9d834..804dbfbb2dc4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -64,6 +64,7 @@ import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateUtils; @@ -1058,6 +1059,11 @@ public static String randomAlphaOfLength(int codeUnits) { return RandomizedTest.randomAsciiOfLength(codeUnits); } + public static SecureString randomSecureStringOfLength(int codeUnits) { + var randomAlpha = randomAlphaOfLength(codeUnits); + return new SecureString(randomAlpha.toCharArray()); + } + public static String randomNullOrAlphaOfLength(int 
codeUnits) { return randomBoolean() ? null : randomAlphaOfLength(codeUnits); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java index 39eaaceae08b..73ba286c9031 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionCreator.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import java.util.Map; @@ -32,4 +33,10 @@ public ExecutableAction create(AzureOpenAiEmbeddingsModel model, Map taskSettings) { + var overriddenModel = AzureOpenAiCompletionModel.of(model, taskSettings); + return new AzureOpenAiCompletionAction(sender, overriddenModel, serviceComponents); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java index 49d1ce61b12d..f45c1d797085 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiActionVisitor.java @@ -8,10 +8,13 @@ 
package org.elasticsearch.xpack.inference.external.action.azureopenai; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import java.util.Map; public interface AzureOpenAiActionVisitor { ExecutableAction create(AzureOpenAiEmbeddingsModel model, Map taskSettings); + + ExecutableAction create(AzureOpenAiCompletionModel model, Map taskSettings); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java new file mode 100644 index 000000000000..d38d02ef9620 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionAction.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.azureopenai; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.http.sender.AzureOpenAiCompletionRequestManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.InferenceInputs; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.services.ServiceComponents; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; +import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; + +public class AzureOpenAiCompletionAction implements ExecutableAction { + + private final String errorMessage; + private final AzureOpenAiCompletionRequestManager requestCreator; + private final Sender sender; + + public AzureOpenAiCompletionAction(Sender sender, AzureOpenAiCompletionModel model, ServiceComponents serviceComponents) { + Objects.requireNonNull(serviceComponents); + Objects.requireNonNull(model); + this.sender = Objects.requireNonNull(sender); + this.requestCreator = new AzureOpenAiCompletionRequestManager(model, serviceComponents.threadPool()); + this.errorMessage = constructFailedToSendRequestMessage(model.getUri(), "Azure OpenAI completion"); 
+ } + + @Override + public void execute(InferenceInputs inferenceInputs, TimeValue timeout, ActionListener listener) { + if (inferenceInputs instanceof DocumentsOnlyInput == false) { + listener.onFailure(new ElasticsearchStatusException("Invalid inference input type", RestStatus.INTERNAL_SERVER_ERROR)); + return; + } + + var docsOnlyInput = (DocumentsOnlyInput) inferenceInputs; + if (docsOnlyInput.getInputs().size() > 1) { + listener.onFailure(new ElasticsearchStatusException("Azure OpenAI completion only accepts 1 input", RestStatus.BAD_REQUEST)); + return; + } + + try { + ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); + + sender.send(requestCreator, inferenceInputs, timeout, wrappedListener); + } catch (ElasticsearchException e) { + listener.onFailure(e); + } catch (Exception e) { + listener.onFailure(createInternalServerError(e, errorMessage)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java new file mode 100644 index 000000000000..2811155f6f35 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/AzureOpenAiCompletionRequestManager.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.apache.http.client.protocol.HttpClientContext; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.azureopenai.AzureOpenAiResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.RequestSender; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequest; +import org.elasticsearch.xpack.inference.external.response.azureopenai.AzureOpenAiCompletionResponseEntity; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +public class AzureOpenAiCompletionRequestManager extends AzureOpenAiRequestManager { + + private static final Logger logger = LogManager.getLogger(AzureOpenAiCompletionRequestManager.class); + + private static final ResponseHandler HANDLER = createCompletionHandler(); + + private final AzureOpenAiCompletionModel model; + + private static ResponseHandler createCompletionHandler() { + return new AzureOpenAiResponseHandler("azure openai completion", AzureOpenAiCompletionResponseEntity::fromResponse); + } + + public AzureOpenAiCompletionRequestManager(AzureOpenAiCompletionModel model, ThreadPool threadPool) { + super(threadPool, model); + this.model = Objects.requireNonNull(model); + } + + @Override + public Runnable create( + @Nullable String query, + List input, + RequestSender requestSender, + Supplier hasRequestCompletedFunction, + HttpClientContext context, + ActionListener listener + ) { + 
AzureOpenAiCompletionRequest request = new AzureOpenAiCompletionRequest(input, model); + return new ExecutableInferenceRequest(requestSender, logger, request, context, HANDLER, hasRequestCompletedFunction, listener); + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java new file mode 100644 index 000000000000..8854dc795036 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequest.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ByteArrayEntity; +import org.elasticsearch.common.Strings; +import org.elasticsearch.xpack.inference.external.request.HttpRequest; +import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; + +import java.net.URI; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +public class AzureOpenAiCompletionRequest implements AzureOpenAiRequest { + + private final List input; + + private final URI uri; + + private final AzureOpenAiCompletionModel model; + + public AzureOpenAiCompletionRequest(List input, AzureOpenAiCompletionModel model) { + this.input = input; + this.model = Objects.requireNonNull(model); + this.uri = model.getUri(); + } + + @Override + public HttpRequest 
createHttpRequest() { + var httpPost = new HttpPost(uri); + var requestEntity = Strings.toString(new AzureOpenAiCompletionRequestEntity(input, model.getTaskSettings().user())); + + ByteArrayEntity byteEntity = new ByteArrayEntity(requestEntity.getBytes(StandardCharsets.UTF_8)); + httpPost.setEntity(byteEntity); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, model.getSecretSettings()); + + return new HttpRequest(httpPost, getInferenceEntityId()); + } + + @Override + public URI getURI() { + return this.uri; + } + + @Override + public String getInferenceEntityId() { + return model.getInferenceEntityId(); + } + + @Override + public Request truncate() { + // No truncation for Azure OpenAI completion + return this; + } + + @Override + public boolean[] getTruncationInfo() { + // No truncation for Azure OpenAI completion + return null; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java new file mode 100644 index 000000000000..86614ef32855 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiCompletionRequestEntity.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record AzureOpenAiCompletionRequestEntity(List messages, @Nullable String user) implements ToXContentObject { + + private static final String NUMBER_OF_RETURNED_CHOICES_FIELD = "n"; + + private static final String MESSAGES_FIELD = "messages"; + + private static final String ROLE_FIELD = "role"; + + private static final String CONTENT_FIELD = "content"; + + private static final String USER_FIELD = "user"; + + public AzureOpenAiCompletionRequestEntity { + Objects.requireNonNull(messages); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startArray(MESSAGES_FIELD); + + { + for (String message : messages) { + builder.startObject(); + + { + builder.field(ROLE_FIELD, USER_FIELD); + builder.field(CONTENT_FIELD, message); + } + + builder.endObject(); + } + } + + builder.endArray(); + + builder.field(NUMBER_OF_RETURNED_CHOICES_FIELD, 1); + + if (Strings.isNullOrEmpty(user) == false) { + builder.field(USER_FIELD, user); + } + + builder.endObject(); + return builder; + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java index f20398fec0e5..00af244fca91 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequest.java @@ -7,13 +7,9 @@ package org.elasticsearch.xpack.inference.external.request.azureopenai; -import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.ByteArrayEntity; -import org.apache.http.message.BasicHeader; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.ValidationException; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.request.HttpRequest; import org.elasticsearch.xpack.inference.external.request.Request; @@ -23,14 +19,7 @@ import java.nio.charset.StandardCharsets; import java.util.Objects; -import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; -import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; -import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; -import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; - public class AzureOpenAiEmbeddingsRequest implements AzureOpenAiRequest { - private static final String MISSING_AUTHENTICATION_ERROR_MESSAGE = - "The request does not have any authentication methods set. 
One of [%s] or [%s] is required."; private final Truncator truncator; private final Truncator.TruncationResult truncationResult; @@ -59,21 +48,7 @@ public HttpRequest createHttpRequest() { ByteArrayEntity byteEntity = new ByteArrayEntity(requestEntity.getBytes(StandardCharsets.UTF_8)); httpPost.setEntity(byteEntity); - httpPost.setHeader(new BasicHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType())); - - var entraId = model.getSecretSettings().entraId(); - var apiKey = model.getSecretSettings().apiKey(); - - if (entraId != null && entraId.isEmpty() == false) { - httpPost.setHeader(createAuthBearerHeader(entraId)); - } else if (apiKey != null && apiKey.isEmpty() == false) { - httpPost.setHeader(new BasicHeader(API_KEY_HEADER, apiKey.toString())); - } else { - // should never happen due to the checks on the secret settings, but just in case - ValidationException validationException = new ValidationException(); - validationException.addValidationError(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID)); - throw validationException; - } + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, model.getSecretSettings()); return new HttpRequest(httpPost, getInferenceEntityId()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java index edb7c70b3903..79a0e4a4eba3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequest.java @@ -7,6 +7,40 @@ package org.elasticsearch.xpack.inference.external.request.azureopenai; +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import 
org.apache.http.message.BasicHeader; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; -public interface AzureOpenAiRequest extends Request {} +import static org.elasticsearch.xpack.inference.external.request.RequestUtils.createAuthBearerHeader; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; + +public interface AzureOpenAiRequest extends Request { + + String MISSING_AUTHENTICATION_ERROR_MESSAGE = + "The request does not have any authentication methods set. One of [%s] or [%s] is required."; + + static void decorateWithAuthHeader(HttpPost httpPost, AzureOpenAiSecretSettings secretSettings) { + httpPost.setHeader(new BasicHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType())); + + var entraId = secretSettings.entraId(); + var apiKey = secretSettings.apiKey(); + + if (entraId != null && entraId.isEmpty() == false) { + httpPost.setHeader(createAuthBearerHeader(entraId)); + } else if (apiKey != null && apiKey.isEmpty() == false) { + httpPost.setHeader(new BasicHeader(API_KEY_HEADER, apiKey.toString())); + } else { + // should never happen due to the checks on the secret settings, but just in case + ValidationException validationException = new ValidationException(); + validationException.addValidationError(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID)); + throw validationException; + } + } +} diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java index 16a02a4c06c1..6e657640e27e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiUtils.java @@ -13,6 +13,8 @@ public class AzureOpenAiUtils { public static final String OPENAI_PATH = "openai"; public static final String DEPLOYMENTS_PATH = "deployments"; public static final String EMBEDDINGS_PATH = "embeddings"; + public static final String CHAT_PATH = "chat"; + public static final String COMPLETIONS_PATH = "completions"; public static final String API_VERSION_PARAMETER = "api-version"; public static final String API_KEY_HEADER = "api-key"; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java index 42fd0ddc812e..55a7f35710cf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/XContentUtils.java @@ -39,7 +39,7 @@ public static void moveToFirstToken(XContentParser parser) throws IOException { public static void positionParserAtTokenAfterField(XContentParser parser, String field, String errorMsgTemplate) throws IOException { XContentParser.Token token = parser.nextToken(); - while (token != null && token != XContentParser.Token.END_OBJECT) { + while (token != null) { if (token == XContentParser.Token.FIELD_NAME && parser.currentName().equals(field)) { parser.nextToken(); return; 
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java new file mode 100644 index 000000000000..ca1df7027cb4 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntity.java @@ -0,0 +1,114 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.azureopenai; + +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.moveToFirstToken; +import static org.elasticsearch.xpack.inference.external.response.XContentUtils.positionParserAtTokenAfterField; + +public class AzureOpenAiCompletionResponseEntity { + + private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Azure OpenAI completions response"; + + /** + * Parses the Azure OpenAI completion response. 
+ * For a request like: + * + *
+     *     
+     *         {
+     *             "inputs": "Please summarize this text: some text"
+     *         }
+     *     
+     * 
+ * + * The response would look like: + * + *
+     *     
+     *         {
+     *     "choices": [
+     *         {
+     *             "content_filter_results": {
+     *                 "hate": { ... },
+     *                 "self_harm": { ... },
+     *                 "sexual": { ... },
+     *                 "violence": { ... }
+     *             },
+     *             "finish_reason": "stop",
+     *             "index": 0,
+     *             "logprobs": null,
+     *             "message": {
+     *                 "content": "response",
+     *                 "role": "assistant"
+     *             }
+     *         }
+     *     ],
+     *     "created": 1714982782,
+     *     "id": "...",
+     *     "model": "gpt-4",
+     *     "object": "chat.completion",
+     *     "prompt_filter_results": [
+     *         {
+     *             "prompt_index": 0,
+     *             "content_filter_results": {
+     *                 "hate": { ... },
+     *                 "self_harm": { ... },
+     *                 "sexual": { ... },
+     *                 "violence": { ... }
+     *             }
+     *         }
+     *     ],
+     *     "system_fingerprint": null,
+     *     "usage": { ... }
+     * }
+     *     
+     * 
+ */ + public static ChatCompletionResults fromResponse(Request request, HttpResult response) throws IOException { + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) { + moveToFirstToken(jsonParser); + + XContentParser.Token token = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "choices", FAILED_TO_FIND_FIELD_TEMPLATE); + + jsonParser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, jsonParser.currentToken(), jsonParser); + + positionParserAtTokenAfterField(jsonParser, "message", FAILED_TO_FIND_FIELD_TEMPLATE); + + token = jsonParser.currentToken(); + + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, jsonParser); + + positionParserAtTokenAfterField(jsonParser, "content", FAILED_TO_FIND_FIELD_TEMPLATE); + + XContentParser.Token contentToken = jsonParser.currentToken(); + ensureExpectedToken(XContentParser.Token.VALUE_STRING, contentToken, jsonParser); + String content = jsonParser.text(); + + return new ChatCompletionResults(List.of(new ChatCompletionResults.Result(content))); + } + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java index 5e50229e2564..708088af54cc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiModel.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.inference.services.azureopenai; +import org.apache.http.client.utils.URIBuilder; import 
org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -14,11 +15,18 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionVisitor; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.core.Strings.format; + public abstract class AzureOpenAiModel extends Model { protected URI uri; @@ -50,6 +58,30 @@ protected AzureOpenAiModel(AzureOpenAiModel model, ServiceSettings serviceSettin public abstract ExecutableAction accept(AzureOpenAiActionVisitor creator, Map taskSettings); + public final URI buildUriString() throws URISyntaxException { + return AzureOpenAiModel.buildUri(resourceName(), deploymentId(), apiVersion(), operationPathSegments()); + } + + // use only for testing directly + public static URI buildUri(String resourceName, String deploymentId, String apiVersion, String... 
pathSegments) + throws URISyntaxException { + String hostname = format("%s.%s", resourceName, AzureOpenAiUtils.HOST_SUFFIX); + + return new URIBuilder().setScheme("https") + .setHost(hostname) + .setPathSegments(createPathSegmentsList(deploymentId, pathSegments)) + .addParameter(AzureOpenAiUtils.API_VERSION_PARAMETER, apiVersion) + .build(); + } + + private static List createPathSegmentsList(String deploymentId, String[] pathSegments) { + List pathSegmentsList = new ArrayList<>( + List.of(AzureOpenAiUtils.OPENAI_PATH, AzureOpenAiUtils.DEPLOYMENTS_PATH, deploymentId) + ); + pathSegmentsList.addAll(Arrays.asList(pathSegments)); + return pathSegmentsList; + } + public URI getUri() { return uri; } @@ -62,4 +94,13 @@ public void setUri(URI newUri) { public AzureOpenAiRateLimitServiceSettings rateLimitServiceSettings() { return rateLimitServiceSettings; } + + // TODO: can be inferred directly from modelConfigurations.getServiceSettings(); will be addressed with separate refactoring + public abstract String resourceName(); + + public abstract String deploymentId(); + + public abstract String apiVersion(); + + public abstract String[] operationPathSegments(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java index f871fe6c080a..48e45f368bfe 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java @@ -25,12 +25,16 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalSecureString; -public record AzureOpenAiSecretSettings(@Nullable SecureString apiKey, @Nullable 
SecureString entraId) implements SecretSettings { +public class AzureOpenAiSecretSettings implements SecretSettings { public static final String NAME = "azure_openai_secret_settings"; public static final String API_KEY = "api_key"; public static final String ENTRA_ID = "entra_id"; + private final SecureString entraId; + + private final SecureString apiKey; + public static AzureOpenAiSecretSettings fromMap(@Nullable Map map) { if (map == null) { return null; @@ -59,14 +63,24 @@ public static AzureOpenAiSecretSettings fromMap(@Nullable Map ma return new AzureOpenAiSecretSettings(secureApiToken, secureEntraId); } - public AzureOpenAiSecretSettings { + public AzureOpenAiSecretSettings(@Nullable SecureString apiKey, @Nullable SecureString entraId) { Objects.requireNonNullElse(apiKey, entraId); + this.apiKey = apiKey; + this.entraId = entraId; } public AzureOpenAiSecretSettings(StreamInput in) throws IOException { this(in.readOptionalSecureString(), in.readOptionalSecureString()); } + public SecureString apiKey() { + return apiKey; + } + + public SecureString entraId() { + return entraId; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -98,4 +112,17 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalSecureString(apiKey); out.writeOptionalSecureString(entraId); } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + AzureOpenAiSecretSettings that = (AzureOpenAiSecretSettings) object; + return Objects.equals(entraId, that.entraId) && Objects.equals(apiKey, that.apiKey); + } + + @Override + public int hashCode() { + return Objects.hash(entraId, apiKey); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index c6b97e22b099..e0e48ab20a86 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.inference.services.SenderService; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsServiceSettings; @@ -121,19 +122,23 @@ private static AzureOpenAiModel createModel( String failureMessage, ConfigurationParseContext context ) { - if (taskType == TaskType.TEXT_EMBEDDING) { - return new AzureOpenAiEmbeddingsModel( - inferenceEntityId, - taskType, - NAME, - serviceSettings, - taskSettings, - secretSettings, - context - ); + switch (taskType) { + case TEXT_EMBEDDING -> { + return new AzureOpenAiEmbeddingsModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings, + context + ); + } + case COMPLETION -> { + return new AzureOpenAiCompletionModel(inferenceEntityId, taskType, NAME, serviceSettings, taskSettings, secretSettings); + } + default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); } - - throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java new file mode 100644 index 000000000000..05cb66345354 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModel.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ModelSecrets; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.inference.external.action.ExecutableAction; +import org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionVisitor; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiModel; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; + +import java.net.URISyntaxException; +import java.util.Map; + +public class AzureOpenAiCompletionModel extends AzureOpenAiModel { + + public static AzureOpenAiCompletionModel of(AzureOpenAiCompletionModel model, Map taskSettings) { + if (taskSettings == null || taskSettings.isEmpty()) { + return model; + } + + var requestTaskSettings = AzureOpenAiCompletionRequestTaskSettings.fromMap(taskSettings); + return new AzureOpenAiCompletionModel(model, AzureOpenAiCompletionTaskSettings.of(model.getTaskSettings(), requestTaskSettings)); + } + + public AzureOpenAiCompletionModel( + String inferenceEntityId, + TaskType taskType, + String service, + Map serviceSettings, 
+ Map taskSettings, + @Nullable Map secrets + ) { + this( + inferenceEntityId, + taskType, + service, + AzureOpenAiCompletionServiceSettings.fromMap(serviceSettings), + AzureOpenAiCompletionTaskSettings.fromMap(taskSettings), + AzureOpenAiSecretSettings.fromMap(secrets) + ); + } + + // Should only be used directly for testing + AzureOpenAiCompletionModel( + String inferenceEntityId, + TaskType taskType, + String service, + AzureOpenAiCompletionServiceSettings serviceSettings, + AzureOpenAiCompletionTaskSettings taskSettings, + @Nullable AzureOpenAiSecretSettings secrets + ) { + super( + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelSecrets(secrets), + serviceSettings + ); + try { + this.uri = buildUriString(); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public AzureOpenAiCompletionModel(AzureOpenAiCompletionModel originalModel, AzureOpenAiCompletionServiceSettings serviceSettings) { + super(originalModel, serviceSettings); + } + + private AzureOpenAiCompletionModel(AzureOpenAiCompletionModel originalModel, AzureOpenAiCompletionTaskSettings taskSettings) { + super(originalModel, taskSettings); + } + + @Override + public AzureOpenAiCompletionServiceSettings getServiceSettings() { + return (AzureOpenAiCompletionServiceSettings) super.getServiceSettings(); + } + + @Override + public AzureOpenAiCompletionTaskSettings getTaskSettings() { + return (AzureOpenAiCompletionTaskSettings) super.getTaskSettings(); + } + + @Override + public AzureOpenAiSecretSettings getSecretSettings() { + return (AzureOpenAiSecretSettings) super.getSecretSettings(); + } + + @Override + public ExecutableAction accept(AzureOpenAiActionVisitor creator, Map taskSettings) { + return creator.create(this, taskSettings); + } + + @Override + public String resourceName() { + return getServiceSettings().resourceName(); + } + + @Override + public String deploymentId() { + return 
getServiceSettings().deploymentId(); + } + + @Override + public String apiVersion() { + return getServiceSettings().apiVersion(); + } + + @Override + public String[] operationPathSegments() { + return new String[] { AzureOpenAiUtils.CHAT_PATH, AzureOpenAiUtils.COMPLETIONS_PATH }; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java new file mode 100644 index 000000000000..5dd42bb1b911 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettings.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; + +import java.util.Map; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.USER; + +public record AzureOpenAiCompletionRequestTaskSettings(@Nullable String user) { + + public static final AzureOpenAiCompletionRequestTaskSettings EMPTY_SETTINGS = new AzureOpenAiCompletionRequestTaskSettings(null); + + public static AzureOpenAiCompletionRequestTaskSettings fromMap(Map map) { + if (map.isEmpty()) { + return AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS; + } + + ValidationException validationException = new ValidationException(); + + String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AzureOpenAiCompletionRequestTaskSettings(user); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java new file mode 100644 index 000000000000..4100ce7358a3 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettings.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiRateLimitServiceSettings; +import org.elasticsearch.xpack.inference.services.settings.FilteredXContentObject; +import org.elasticsearch.xpack.inference.services.settings.RateLimitSettings; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.API_VERSION; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.DEPLOYMENT_ID; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields.RESOURCE_NAME; + +public class AzureOpenAiCompletionServiceSettings extends FilteredXContentObject + implements + ServiceSettings, + AzureOpenAiRateLimitServiceSettings { + + public static final String NAME = "azure_openai_completions_service_settings"; + + /** + * Rate limit documentation can be found here: + * + * Limits per region per model id + * https://learn.microsoft.com/en-us/azure/ai-services/openai/quotas-limits + * + * How to change the limits + * https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/quota?tabs=rest + * + * Blog giving some examples + * 
https://techcommunity.microsoft.com/t5/fasttrack-for-azure/optimizing-azure-openai-a-guide-to-limits-quotas-and-best/ba-p/4076268 + * + * According to the docs 1000 tokens per minute (TPM) = 6 requests per minute (RPM). The limits change depending on the region + * and model. The lowest chat completions limit is 20k TPM, so we'll default to that. + * Calculation: 20K TPM = 20 * 6 = 120 requests per minute (used `francecentral` and `gpt-4` as basis for the calculation). + */ + private static final RateLimitSettings DEFAULT_RATE_LIMIT_SETTINGS = new RateLimitSettings(120); + + public static AzureOpenAiCompletionServiceSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + var settings = fromMap(map, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AzureOpenAiCompletionServiceSettings(settings); + } + + private static AzureOpenAiCompletionServiceSettings.CommonFields fromMap( + Map map, + ValidationException validationException + ) { + String resourceName = extractRequiredString(map, RESOURCE_NAME, ModelConfigurations.SERVICE_SETTINGS, validationException); + String deploymentId = extractRequiredString(map, DEPLOYMENT_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + String apiVersion = extractRequiredString(map, API_VERSION, ModelConfigurations.SERVICE_SETTINGS, validationException); + RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); + + return new AzureOpenAiCompletionServiceSettings.CommonFields(resourceName, deploymentId, apiVersion, rateLimitSettings); + } + + private record CommonFields(String resourceName, String deploymentId, String apiVersion, RateLimitSettings rateLimitSettings) {} + + private final String resourceName; + private final String deploymentId; + private final String apiVersion; + + private final RateLimitSettings 
rateLimitSettings; + + public AzureOpenAiCompletionServiceSettings( + String resourceName, + String deploymentId, + String apiVersion, + @Nullable RateLimitSettings rateLimitSettings + ) { + this.resourceName = resourceName; + this.deploymentId = deploymentId; + this.apiVersion = apiVersion; + this.rateLimitSettings = Objects.requireNonNullElse(rateLimitSettings, DEFAULT_RATE_LIMIT_SETTINGS); + } + + public AzureOpenAiCompletionServiceSettings(StreamInput in) throws IOException { + resourceName = in.readString(); + deploymentId = in.readString(); + apiVersion = in.readString(); + rateLimitSettings = new RateLimitSettings(in); + } + + private AzureOpenAiCompletionServiceSettings(AzureOpenAiCompletionServiceSettings.CommonFields fields) { + this(fields.resourceName, fields.deploymentId, fields.apiVersion, fields.rateLimitSettings); + } + + public String resourceName() { + return resourceName; + } + + public String deploymentId() { + return deploymentId; + } + + @Override + public RateLimitSettings rateLimitSettings() { + return rateLimitSettings; + } + + public String apiVersion() { + return apiVersion; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + + toXContentFragmentOfExposedFields(builder, params); + rateLimitSettings.toXContent(builder, params); + + builder.endObject(); + return builder; + } + + @Override + protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.field(RESOURCE_NAME, resourceName); + builder.field(DEPLOYMENT_ID, deploymentId); + builder.field(API_VERSION, apiVersion); + + return builder; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_AZURE_OPENAI_COMPLETIONS; + } + + @Override + public void writeTo(StreamOutput
out) throws IOException { + out.writeString(resourceName); + out.writeString(deploymentId); + out.writeString(apiVersion); + rateLimitSettings.writeTo(out); + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + AzureOpenAiCompletionServiceSettings that = (AzureOpenAiCompletionServiceSettings) object; + return Objects.equals(resourceName, that.resourceName) + && Objects.equals(deploymentId, that.deploymentId) + && Objects.equals(apiVersion, that.apiVersion) + && Objects.equals(rateLimitSettings, that.rateLimitSettings); + } + + @Override + public int hashCode() { + return Objects.hash(resourceName, deploymentId, apiVersion, rateLimitSettings); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java new file mode 100644 index 000000000000..6e9f77e1ade2 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettings.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; + +public class AzureOpenAiCompletionTaskSettings implements TaskSettings { + + public static final String NAME = "azure_openai_completion_task_settings"; + + public static final String USER = "user"; + + public static AzureOpenAiCompletionTaskSettings fromMap(Map map) { + ValidationException validationException = new ValidationException(); + + String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + return new AzureOpenAiCompletionTaskSettings(user); + } + + private final String user; + + public static AzureOpenAiCompletionTaskSettings of( + AzureOpenAiCompletionTaskSettings originalSettings, + AzureOpenAiCompletionRequestTaskSettings requestSettings + ) { + var userToUse = requestSettings.user() == null ? 
originalSettings.user : requestSettings.user(); + return new AzureOpenAiCompletionTaskSettings(userToUse); + } + + public AzureOpenAiCompletionTaskSettings(@Nullable String user) { + this.user = user; + } + + public AzureOpenAiCompletionTaskSettings(StreamInput in) throws IOException { + this.user = in.readOptionalString(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (user != null) { + builder.field(USER, user); + } + } + builder.endObject(); + return builder; + } + + public String user() { + return user; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_AZURE_OPENAI_COMPLETIONS; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(user); + } + + @Override + public boolean equals(Object object) { + if (this == object) return true; + if (object == null || getClass() != object.getClass()) return false; + AzureOpenAiCompletionTaskSettings that = (AzureOpenAiCompletionTaskSettings) object; + return Objects.equals(user, that.user); + } + + @Override + public int hashCode() { + return Objects.hash(user); + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java index 93d1e31a3bed..377bb33f5861 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModel.java @@ -7,7 +7,6 @@ package 
org.elasticsearch.xpack.inference.services.azureopenai.embeddings; -import org.apache.http.client.utils.URIBuilder; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; @@ -19,12 +18,9 @@ import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiModel; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; -import java.net.URI; import java.net.URISyntaxException; import java.util.Map; -import static org.elasticsearch.core.Strings.format; - public class AzureOpenAiEmbeddingsModel extends AzureOpenAiModel { public static AzureOpenAiEmbeddingsModel of(AzureOpenAiEmbeddingsModel model, Map taskSettings) { @@ -70,7 +66,7 @@ public AzureOpenAiEmbeddingsModel( serviceSettings ); try { - this.uri = getEmbeddingsUri(serviceSettings.resourceName(), serviceSettings.deploymentId(), serviceSettings.apiVersion()); + this.uri = buildUriString(); } catch (URISyntaxException e) { throw new RuntimeException(e); } @@ -104,17 +100,24 @@ public ExecutableAction accept(AzureOpenAiActionVisitor creator, Map requestMap, List input, @Nullable String user) { + public void testInfer_AzureOpenAiCompletion_WithOverriddenUser() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var originalUser = "original_user"; + var overriddenUser = "overridden_user"; + var apiKey = "api_key"; + var completionInput = "some input"; + + var model = createCompletionModel("resource", "deployment", 
"apiversion", originalUser, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var actionCreator = new AzureOpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var taskSettingsWithUserOverride = createRequestTaskSettingsMap(overriddenUser); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, taskSettingsWithUserOverride); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + var requestMap = entityAsMap(request.getBody()); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + validateRequestWithApiKey(request, apiKey); + validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), overriddenUser); + + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public void testInfer_AzureOpenAiCompletionModel_WithoutUser() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var completionInput = "some input"; + var apiKey = "api key"; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createCompletionModel("resource", "deployment", "apiversion", null, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var actionCreator = new AzureOpenAiActionCreator(sender, 
createWithEmptySettings(threadPool)); + var requestTaskSettingsWithoutUser = createRequestTaskSettingsMap(null); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, requestTaskSettingsWithoutUser); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + var requestMap = entityAsMap(request.getBody()); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + validateRequestWithApiKey(request, apiKey); + validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), null); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + public void testInfer_AzureOpenAiCompletionModel_FailsFromInvalidResponseFormat() throws IOException { + // timeout as zero for no retries + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager, ZERO_TIMEOUT_SETTINGS); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + // "choices" missing + String responseJson = """ + { + "not_choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var completionInput = "some input"; + var apiKey = "api key"; + var userOverride = "overridden_user"; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createCompletionModel("resource", "deployment", "apiversion", null, apiKey, null, "id"); + model.setUri(new URI(getUrl(webServer))); + var actionCreator = new AzureOpenAiActionCreator(sender, 
createWithEmptySettings(threadPool)); + var requestTaskSettingsWithoutUser = createRequestTaskSettingsMap(userOverride); + var action = (AzureOpenAiCompletionAction) actionCreator.create(model, requestTaskSettingsWithoutUser); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer))) + ); + assertThat( + thrownException.getCause().getMessage(), + is("Failed to find required field [choices] in Azure OpenAI completions response") + ); + + assertThat(webServer.requests(), hasSize(1)); + validateRequestWithApiKey(webServer.requests().get(0), apiKey); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + validateCompletionRequestMapWithUser(requestMap, List.of(completionInput), userOverride); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + private void validateEmbeddingsRequestMapWithUser(Map requestMap, List input, @Nullable String user) { var expectedSize = user == null ? 1 : 2; assertThat(requestMap.size(), is(expectedSize)); @@ -446,6 +621,24 @@ private void validateRequestMapWithUser(Map requestMap, List requestMap, List input, @Nullable String user) { + assertThat("input for completions can only be of size 1", input.size(), equalTo(1)); + + var expectedSize = user == null ? 
2 : 3; + + assertThat(requestMap.size(), is(expectedSize)); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input.get(0))); + + if (user != null) { + assertThat(requestMap.get("user"), is(user)); + } + } + + @SuppressWarnings("unchecked") + public static String getContentOfMessageInRequestMap(Map requestMap) { + return ((Map) ((List) requestMap.get("messages")).get(0)).get("content").toString(); + } + private void validateRequestWithApiKey(MockRequest request, String apiKey) { assertNull(request.getUri().getQuery()); assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java new file mode 100644 index 000000000000..96127841c17a --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/azureopenai/AzureOpenAiCompletionActionTests.java @@ -0,0 +1,200 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.action.azureopenai; + +import org.apache.http.HttpHeaders; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.http.MockResponse; +import org.elasticsearch.test.http.MockWebServer; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderTests; +import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreatorTests.getContentOfMessageInRequestMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; 
+import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; +import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; +import static org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests.createCompletionModel; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; + +public class AzureOpenAiCompletionActionTests extends ESTestCase { + + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private final MockWebServer webServer = new MockWebServer(); + private ThreadPool threadPool; + private HttpClientManager clientManager; + + @Before + public void init() throws Exception { + webServer.start(); + threadPool = createThreadPool(inferenceUtilityPool()); + clientManager = HttpClientManager.create(Settings.EMPTY, threadPool, mockClusterServiceEmpty(), mock(ThrottlerManager.class)); + } + + @After + public void shutdown() throws IOException { + clientManager.close(); + terminate(threadPool); + webServer.close(); + } + + public void testExecute_ReturnsSuccessfulResponse() throws IOException { + var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + ] + }"""; + + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var user = "user"; + var apiKey = "api_key"; + var completionInput = "some input"; + + var action = createAction("resource", 
"deployment", "apiversion", user, apiKey, sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of(completionInput)), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(webServer.requests(), hasSize(1)); + + var request = webServer.requests().get(0); + assertNull(request.getUri().getQuery()); + assertThat(request.getHeader(HttpHeaders.CONTENT_TYPE), is(XContentType.JSON.mediaType())); + assertThat(request.getHeader(AzureOpenAiUtils.API_KEY_HEADER), is(apiKey)); + + assertThat( + result.asMap(), + is(Map.of(ChatCompletionResults.COMPLETION, List.of(Map.of(ChatCompletionResults.Result.RESULT, "response")))) + ); + + var requestMap = entityAsMap(request.getBody()); + assertThat(requestMap.size(), is(3)); + assertThat(getContentOfMessageInRequestMap(requestMap), is(completionInput)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + } + + public void testExecute_ThrowsElasticsearchException() { + var sender = mock(Sender.class); + doThrow(new ElasticsearchException("failed")).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is("failed")); + } + + public void testExecute_ThrowsElasticsearchException_WhenSenderOnFailureIsCalled() { + var sender = mock(Sender.class); + + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + listener.onFailure(new IllegalStateException("failed")); + + return Void.TYPE; + 
}).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer)))); + } + + public void testExecute_ThrowsException() { + var sender = mock(Sender.class); + doThrow(new IllegalArgumentException("failed")).when(sender).send(any(), any(), any(), any()); + + var action = createAction("resource", "deployment", "apiVersion", "user", "apikey", sender, "id"); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(new DocumentsOnlyInput(List.of("abc")), InferenceAction.Request.DEFAULT_TIMEOUT, listener); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat(thrownException.getMessage(), is(format("Failed to send Azure OpenAI completion request to [%s]", getUrl(webServer)))); + } + + private AzureOpenAiCompletionAction createAction( + String resourceName, + String deploymentId, + String apiVersion, + @Nullable String user, + String apiKey, + Sender sender, + String inferenceEntityId + ) { + try { + var model = createCompletionModel(resourceName, deploymentId, apiVersion, user, apiKey, null, inferenceEntityId); + model.setUri(new URI(getUrl(webServer))); + return new AzureOpenAiCompletionAction(sender, model, createWithEmptySettings(threadPool)); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java new file mode 100644 index 000000000000..2d37f273e1de --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiRequestTests.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; + +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiRequest.MISSING_AUTHENTICATION_ERROR_MESSAGE; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.API_KEY; +import static org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings.ENTRA_ID; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class AzureOpenAiRequestTests extends ESTestCase { + + public void testDecorateWithAuthHeader_apiKeyPresent() { + var apiKey = randomSecureStringOfLength(10); + var httpPost = new HttpPost(); + var secretSettings = new AzureOpenAiSecretSettings(apiKey, null); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettings); + var apiKeyHeader = httpPost.getFirstHeader(API_KEY_HEADER); + + 
assertThat(apiKeyHeader.getValue(), equalTo(apiKey.toString())); + } + + public void testDecorateWithAuthHeader_entraIdPresent() { + var entraId = randomSecureStringOfLength(10); + var httpPost = new HttpPost(); + var secretSettings = new AzureOpenAiSecretSettings(null, entraId); + + AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettings); + var authHeader = httpPost.getFirstHeader(HttpHeaders.AUTHORIZATION); + + assertThat(authHeader.getValue(), equalTo("Bearer " + entraId)); + } + + public void testDecorateWithAuthHeader_entraIdAndApiKeyMissing_throwMissingAuthValidationException() { + var httpPost = new HttpPost(); + var secretSettingsMock = mock(AzureOpenAiSecretSettings.class); + + when(secretSettingsMock.entraId()).thenReturn(null); + when(secretSettingsMock.apiKey()).thenReturn(null); + + ValidationException exception = expectThrows( + ValidationException.class, + () -> AzureOpenAiRequest.decorateWithAuthHeader(httpPost, secretSettingsMock) + ); + assertTrue(exception.getMessage().contains(Strings.format(MISSING_AUTHENTICATION_ERROR_MESSAGE, API_KEY, ENTRA_ID))); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java new file mode 100644 index 000000000000..7647a4983f4b --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestEntityTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequestEntity; + +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.CoreMatchers.is; + +public class AzureOpenAiCompletionRequestEntityTests extends ESTestCase { + + public void testXContent_WritesSingleMessage_DoesNotWriteUserWhenItIsNull() throws IOException { + var entity = new AzureOpenAiCompletionRequestEntity(List.of("input"), null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"messages":[{"role":"user","content":"input"}],"n":1}""")); + } + + public void testXContent_WritesSingleMessage_WriteUserWhenItIsNull() throws IOException { + var entity = new AzureOpenAiCompletionRequestEntity(List.of("input"), "user"); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"messages":[{"role":"user","content":"input"}],"n":1,"user":"user"}""")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java new file mode 100644 index 000000000000..048d4ea16d56 --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/completion/AzureOpenAiCompletionRequestTests.java @@ -0,0 +1,100 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.request.azureopenai.completion; + +import org.apache.http.HttpHeaders; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiCompletionRequest; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionModelTests; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.xpack.inference.external.action.azureopenai.AzureOpenAiActionCreatorTests.getContentOfMessageInRequestMap; +import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; +import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionRequestTests extends ESTestCase { + + public void testCreateRequest_WithApiKeyDefined() throws IOException { + var input = "input"; + var user = "user"; + var apiKey = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", apiKey, null, input, user); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat( + httpPost.getURI().toString(), + 
is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is(apiKey)); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + + public void testCreateRequest_WithEntraIdDefined() throws IOException { + var input = "input"; + var user = "user"; + var entraId = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", null, entraId, input, user); + var httpRequest = request.createHttpRequest(); + + assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); + var httpPost = (HttpPost) httpRequest.httpRequestBase(); + + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + + assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer " + entraId)); + + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(getContentOfMessageInRequestMap(requestMap), is(input)); + assertThat(requestMap.get("user"), is(user)); + assertThat(requestMap.get("n"), is(1)); + } + + protected AzureOpenAiCompletionRequest createRequest( + String resource, + String deployment, + String apiVersion, + String apiKey, + String entraId, + String input, + String user + ) { + var completionModel = AzureOpenAiCompletionModelTests.createCompletionModel( + resource, + deployment, + apiVersion, + user, + apiKey, + entraId, + "id" + ); + + return new AzureOpenAiCompletionRequest(List.of(input), 
completionModel); + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java similarity index 96% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java index 14283ed53eed..f732a01c893e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestEntityTests.java @@ -5,13 +5,14 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.external.request.azureopenai; +package org.elasticsearch.xpack.inference.external.request.azureopenai.embeddings; import org.elasticsearch.common.Strings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiEmbeddingsRequestEntity; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java similarity index 73% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java index 88e6880b72f0..bbd8a49d65f4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/AzureOpenAiEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/azureopenai/embeddings/AzureOpenAiEmbeddingsRequestTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.inference.external.request.azureopenai; +package org.elasticsearch.xpack.inference.external.request.azureopenai.embeddings; import org.apache.http.HttpHeaders; import org.apache.http.client.methods.HttpPost; @@ -14,56 +14,69 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.common.TruncatorTests; -import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModel; +import org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiEmbeddingsRequest; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsModelTests; import java.io.IOException; -import java.net.URISyntaxException; import java.util.List; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.request.azureopenai.AzureOpenAiUtils.API_KEY_HEADER; import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; public class AzureOpenAiEmbeddingsRequestTests extends ESTestCase { - public void testCreateRequest_WithApiKeyDefined() throws IOException, URISyntaxException { - var request = createRequest("resource", "deployment", "apiVersion", "apikey", null, "abc", "user"); + + public void testCreateRequest_WithApiKeyDefined() throws IOException { + var input = "input"; + var user = "user"; + var apiKey = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", apiKey, null, input, user); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); var httpPost = (HttpPost) httpRequest.httpRequestBase(); - var expectedUri = AzureOpenAiEmbeddingsModel.getEmbeddingsUri("resource", "deployment", 
"apiVersion").toString(); - assertThat(httpPost.getURI().toString(), is(expectedUri)); + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); - assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is("apikey")); + assertThat(httpPost.getLastHeader(API_KEY_HEADER).getValue(), is(apiKey)); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap, aMapWithSize(2)); - assertThat(requestMap.get("input"), is(List.of("abc"))); - assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.size(), equalTo(2)); + assertThat(requestMap.get("input"), is(List.of(input))); + assertThat(requestMap.get("user"), is(user)); } - public void testCreateRequest_WithEntraIdDefined() throws IOException, URISyntaxException { - var request = createRequest("resource", "deployment", "apiVersion", null, "entraId", "abc", "user"); + public void testCreateRequest_WithEntraIdDefined() throws IOException { + var input = "input"; + var user = "user"; + var entraId = randomAlphaOfLength(10); + + var request = createRequest("resource", "deployment", "2024", null, entraId, input, user); var httpRequest = request.createHttpRequest(); assertThat(httpRequest.httpRequestBase(), instanceOf(HttpPost.class)); var httpPost = (HttpPost) httpRequest.httpRequestBase(); - var expectedUri = AzureOpenAiEmbeddingsModel.getEmbeddingsUri("resource", "deployment", "apiVersion").toString(); - assertThat(httpPost.getURI().toString(), is(expectedUri)); + assertThat( + httpPost.getURI().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); assertThat(httpPost.getLastHeader(HttpHeaders.CONTENT_TYPE).getValue(), is(XContentType.JSON.mediaType())); - 
assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer entraId")); + assertThat(httpPost.getLastHeader(HttpHeaders.AUTHORIZATION).getValue(), is("Bearer " + entraId)); var requestMap = entityAsMap(httpPost.getEntity().getContent()); - assertThat(requestMap, aMapWithSize(2)); - assertThat(requestMap.get("input"), is(List.of("abc"))); - assertThat(requestMap.get("user"), is("user")); + assertThat(requestMap.size(), equalTo(2)); + assertThat(requestMap.get("input"), is(List.of(input))); + assertThat(requestMap.get("user"), is(user)); } public void testTruncate_ReducesInputTextSizeByHalf() throws IOException { @@ -87,7 +100,7 @@ public void testIsTruncated_ReturnsTrue() { assertTrue(truncatedRequest.getTruncationInfo()[0]); } - public static AzureOpenAiEmbeddingsRequest createRequest( + public AzureOpenAiEmbeddingsRequest createRequest( String resourceName, String deploymentId, String apiVersion, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java index 4f7cd9ea89a1..897c648eb942 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/XContentUtilsTests.java @@ -106,6 +106,24 @@ public void testPositionParserAtTokenAfterField_ThrowsWithMalformedJSON() throws } } + public void testPositionParserAtTokenAfterField_ConsumesUntilEnd() throws IOException { + var json = """ + { + "key": { + "foo": "bar" + }, + "target": "value" + } + """; + + var errorFormat = "Error: %s"; + + try (XContentParser parser = createParser(XContentType.JSON.xContent(), json)) { + XContentUtils.positionParserAtTokenAfterField(parser, "target", errorFormat); + assertEquals("value", parser.text()); + } + } + public void 
testConsumeUntilObjectEnd() throws IOException { var json = """ { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java new file mode 100644 index 000000000000..3afe4bd439e0 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java @@ -0,0 +1,220 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.response.azureopenai; + +import org.apache.http.HttpResponse; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.results.ChatCompletionResults; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class AzureOpenAiCompletionResponseEntityTests extends ESTestCase { + + public void testFromResponse_CreatesResultsForASingleItem() throws IOException { + String responseJson = """ + { + "choices": [ + { + "content_filter_results": { + "hate": { + "filtered": false, + "severity": "safe" + }, + "self_harm": { + "filtered": false, + "severity": "safe" + }, + "sexual": { + "filtered": false, + "severity": "safe" + }, + "violence": { + "filtered": false, + "severity": "safe" + } + }, 
+ "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion", + "prompt_filter_results": [ + { + "prompt_index": 0, + "content_filter_results": { + "hate": { + "filtered": false, + "severity": "safe" + }, + "self_harm": { + "filtered": false, + "severity": "safe" + }, + "sexual": { + "filtered": false, + "severity": "safe" + }, + "violence": { + "filtered": false, + "severity": "safe" + } + } + } + ], + "usage": { + "completion_tokens": 138, + "prompt_tokens": 11, + "total_tokens": 149 + } + }"""; + + ChatCompletionResults chatCompletionResults = AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat(chatCompletionResults.getResults().size(), equalTo(1)); + + ChatCompletionResults.Result result = chatCompletionResults.getResults().get(0); + assertThat(result.asMap().get(result.getResultsField()), is("response")); + } + + public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { + String responseJson = """ + { + "not_choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [choices] in Azure OpenAI completions response")); + } + + public void testFromResponse_FailsWhenChoicesFieldIsNotAnArray() { + String responseJson = """ + { + "choices": { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": 
"response", + "role": "assistant" + } + }, + "model": "gpt-4", + "object": "chat.completion" + ] + }"""; + + var thrownException = expectThrows( + ParsingException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_OBJECT] but found [FIELD_NAME]") + ); + } + + public void testFromResponse_FailsWhenMessageDoesNotExist() { + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "not_message": { + "content": "response", + "role": "assistant" + } + } + ], + "model": "gpt-4", + "object": "chat.completion" + }"""; + + var thrownException = expectThrows( + IllegalStateException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat(thrownException.getMessage(), is("Failed to find required field [message] in Azure OpenAI completions response")); + } + + public void testFromResponse_FailsWhenMessageValueIsAString() { + String responseJson = """ + { + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": "string" + } + ], + "model": "gpt-4", + "object": "chat.completion" + ] + }"""; + + var thrownException = expectThrows( + ParsingException.class, + () -> AzureOpenAiCompletionResponseEntity.fromResponse( + mock(Request.class), + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ) + ); + + assertThat( + thrownException.getMessage(), + is("Failed to parse object: expecting token of type [START_OBJECT] but found [VALUE_STRING]") + ); + } + +} diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java index 18f702014e2d..080602e8fd24 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java @@ -74,7 +74,7 @@ public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { }, "logprobs": null, "finish_reason": "stop" - }, + } ], "usage": { "prompt_tokens": 46, @@ -112,7 +112,7 @@ public void testFromResponse_FailsWhenChoicesFieldNotAnArray() { }, "logprobs": null, "finish_reason": "stop" - }, + } }, "usage": { "prompt_tokens": 46, @@ -153,7 +153,7 @@ public void testFromResponse_FailsWhenMessageDoesNotExist() { }, "logprobs": null, "finish_reason": "stop" - }, + } ], "usage": { "prompt_tokens": 46, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java new file mode 100644 index 000000000000..93d948a5bdcf --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionModelTests.java @@ -0,0 +1,142 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.sameInstance; + +public class AzureOpenAiCompletionModelTests extends ESTestCase { + + public void testOverrideWith_UpdatedTaskSettings_OverridesUser() { + var resource = "resource"; + var deploymentId = "deployment"; + var apiVersion = "api version"; + var apiKey = "api key"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + + var user = "user"; + var userOverride = "user override"; + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + var requestTaskSettingsMap = taskSettingsMap(userOverride); + var overriddenModel = AzureOpenAiCompletionModel.of(model, requestTaskSettingsMap); + + assertThat( + overriddenModel, + equalTo(createCompletionModel(resource, deploymentId, apiVersion, userOverride, apiKey, entraId, inferenceEntityId)) + ); + } + + public void testOverrideWith_EmptyMap_OverridesNothing() { + var model = createCompletionModel("resource", "deployment", "api version", "user", "api key", "entra id", "inference entity id"); + var requestTaskSettingsMap = Map.of(); + var overriddenModel = AzureOpenAiCompletionModel.of(model, requestTaskSettingsMap); + + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_NullMap_OverridesNothing() { + var model = createCompletionModel("resource", 
"deployment", "api version", "user", "api key", "entra id", "inference entity id"); + var overriddenModel = AzureOpenAiCompletionModel.of(model, null); + + assertThat(overriddenModel, sameInstance(model)); + } + + public void testOverrideWith_UpdatedServiceSettings_OverridesApiVersion() { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + + var apiVersion = "api version"; + var updatedApiVersion = "updated api version"; + + var updatedServiceSettings = new AzureOpenAiCompletionServiceSettings(resource, deploymentId, updatedApiVersion, null); + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + var overriddenModel = new AzureOpenAiCompletionModel(model, updatedServiceSettings); + + assertThat( + overriddenModel, + is(createCompletionModel(resource, deploymentId, updatedApiVersion, user, apiKey, entraId, inferenceEntityId)) + ); + } + + public void testBuildUriString() throws URISyntaxException { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + var apiVersion = "2024"; + + var model = createCompletionModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + + assertThat( + model.buildUriString().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/chat/completions?api-version=2024") + ); + } + + public static AzureOpenAiCompletionModel createModelWithRandomValues() { + return createCompletionModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + } + + public static AzureOpenAiCompletionModel createCompletionModel( + String 
resourceName, + String deploymentId, + String apiVersion, + String user, + @Nullable String apiKey, + @Nullable String entraId, + String inferenceEntityId + ) { + var secureApiKey = apiKey != null ? new SecureString(apiKey.toCharArray()) : null; + var secureEntraId = entraId != null ? new SecureString(entraId.toCharArray()) : null; + + return new AzureOpenAiCompletionModel( + inferenceEntityId, + TaskType.COMPLETION, + "service", + new AzureOpenAiCompletionServiceSettings(resourceName, deploymentId, apiVersion, null), + new AzureOpenAiCompletionTaskSettings(user), + new AzureOpenAiSecretSettings(secureApiKey, secureEntraId) + ); + } + + private Map taskSettingsMap(String user) { + Map taskSettingsMap = new HashMap<>(); + taskSettingsMap.put(AzureOpenAiServiceFields.USER, user); + return taskSettingsMap; + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java new file mode 100644 index 000000000000..51963c275a08 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionRequestTaskSettingsTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionRequestTaskSettingsTests extends ESTestCase { + + public void testFromMap_ReturnsEmptySettings_WhenMapIsEmpty() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of())); + assertThat(settings, is(AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS)); + } + + public void testFromMap_ReturnsEmptySettings_WhenMapDoesNotContainKnownFields() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "model"))); + assertThat(settings, is(AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS)); + } + + public void testFromMap_ReturnsUser() { + var settings = AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, "user"))); + assertThat(settings.user(), is("user")); + } + + public void testFromMap_WhenUserIsEmpty_ThrowsValidationException() { + var exception = expectThrows( + ValidationException.class, + () -> AzureOpenAiCompletionRequestTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, ""))) + ); + + assertThat(exception.getMessage(), containsString("[user] must be a non-empty string")); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java new file mode 100644 index 000000000000..cbaa41c37958 --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionServiceSettingsTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionServiceSettingsTests extends AbstractWireSerializingTestCase { + + private static AzureOpenAiCompletionServiceSettings createRandom() { + var resourceName = randomAlphaOfLength(8); + var deploymentId = randomAlphaOfLength(8); + var apiVersion = randomAlphaOfLength(8); + + return new AzureOpenAiCompletionServiceSettings(resourceName, deploymentId, apiVersion, null); + } + + public void testFromMap_Request_CreatesSettingsCorrectly() { + var resourceName = "this-resource"; + var deploymentId = "this-deployment"; + var apiVersion = "2024-01-01"; + + var serviceSettings = AzureOpenAiCompletionServiceSettings.fromMap( + new HashMap<>( + Map.of( + AzureOpenAiServiceFields.RESOURCE_NAME, + resourceName, + AzureOpenAiServiceFields.DEPLOYMENT_ID, + deploymentId, + AzureOpenAiServiceFields.API_VERSION, + apiVersion + ) + ) + ); + + assertThat(serviceSettings, is(new AzureOpenAiCompletionServiceSettings(resourceName, 
deploymentId, apiVersion, null))); + } + + public void testToXContent_WritesAllValues() throws IOException { + var entity = new AzureOpenAiCompletionServiceSettings("resource", "deployment", "2024", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"resource_name":"resource","deployment_id":"deployment","api_version":"2024","rate_limit":{"requests_per_minute":120}}""")); + } + + public void testToFilteredXContent_WritesAllValues_Except_RateLimit() throws IOException { + var entity = new AzureOpenAiCompletionServiceSettings("resource", "deployment", "2024", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + var filteredXContent = entity.getFilteredXContentObject(); + filteredXContent.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, is(""" + {"resource_name":"resource","deployment_id":"deployment","api_version":"2024"}""")); + } + + @Override + protected Writeable.Reader instanceReader() { + return AzureOpenAiCompletionServiceSettings::new; + } + + @Override + protected AzureOpenAiCompletionServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected AzureOpenAiCompletionServiceSettings mutateInstance(AzureOpenAiCompletionServiceSettings instance) throws IOException { + return createRandom(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java new file mode 100644 index 000000000000..7f0e730b8835 --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/completion/AzureOpenAiCompletionTaskSettingsTests.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.azureopenai.completion; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiServiceFields; +import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettings; +import org.hamcrest.MatcherAssert; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; + +public class AzureOpenAiCompletionTaskSettingsTests extends AbstractWireSerializingTestCase { + + public static AzureOpenAiCompletionTaskSettings createRandomWithUser() { + return new AzureOpenAiCompletionTaskSettings(randomAlphaOfLength(15)); + } + + public static AzureOpenAiCompletionTaskSettings createRandom() { + var user = randomBoolean() ? 
randomAlphaOfLength(15) : null; + return new AzureOpenAiCompletionTaskSettings(user); + } + + public void testFromMap_WithUser() { + var user = "user"; + + assertThat( + new AzureOpenAiCompletionTaskSettings(user), + is(AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, user)))) + ); + } + + public void testFromMap_UserIsEmptyString() { + var thrownException = expectThrows( + ValidationException.class, + () -> AzureOpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, ""))) + ); + + MatcherAssert.assertThat( + thrownException.getMessage(), + is(Strings.format("Validation Failed: 1: [task_settings] Invalid value empty string. [user] must be a non-empty string;")) + ); + } + + public void testFromMap_MissingUser_DoesNotThrowException() { + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of())); + assertNull(taskSettings.user()); + } + + public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, "user"))); + + var overriddenTaskSettings = AzureOpenAiCompletionTaskSettings.of( + taskSettings, + AzureOpenAiCompletionRequestTaskSettings.EMPTY_SETTINGS + ); + assertThat(overriddenTaskSettings, is(taskSettings)); + } + + public void testOverrideWith_UsesOverriddenSettings() { + var user = "user"; + var userOverride = "user override"; + + var taskSettings = AzureOpenAiCompletionTaskSettings.fromMap(new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, user))); + + var requestTaskSettings = AzureOpenAiCompletionRequestTaskSettings.fromMap( + new HashMap<>(Map.of(AzureOpenAiServiceFields.USER, userOverride)) + ); + + var overriddenTaskSettings = AzureOpenAiCompletionTaskSettings.of(taskSettings, requestTaskSettings); + assertThat(overriddenTaskSettings, is(new AzureOpenAiCompletionTaskSettings(userOverride))); + } + + @Override + protected 
Writeable.Reader instanceReader() { + return AzureOpenAiCompletionTaskSettings::new; + } + + @Override + protected AzureOpenAiCompletionTaskSettings createTestInstance() { + return createRandomWithUser(); + } + + @Override + protected AzureOpenAiCompletionTaskSettings mutateInstance(AzureOpenAiCompletionTaskSettings instance) throws IOException { + return createRandomWithUser(); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java index aebc2240983f..1747155623a9 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsModelTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; +import java.net.URISyntaxException; import java.util.Map; import static org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettingsTests.getAzureOpenAiRequestTaskSettingsMap; @@ -65,6 +66,35 @@ public void testCreateModel_FromUpdatedServiceSettings() { assertThat(overridenModel, is(createModel("resource", "deployment", "override_apiversion", "user", "api_key", null, "id"))); } + public void testBuildUriString() throws URISyntaxException { + var resource = "resource"; + var deploymentId = "deployment"; + var apiKey = "api key"; + var user = "user"; + var entraId = "entra id"; + var inferenceEntityId = "inference entity id"; + var apiVersion = "2024"; + + var model = createModel(resource, deploymentId, apiVersion, user, apiKey, entraId, inferenceEntityId); + + assertThat( + 
model.buildUriString().toString(), + is("https://resource.openai.azure.com/openai/deployments/deployment/embeddings?api-version=2024") + ); + } + + public static AzureOpenAiEmbeddingsModel createModelWithRandomValues() { + return createModel( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10) + ); + } + public static AzureOpenAiEmbeddingsModel createModel( String resourceName, String deploymentId, From 9d9f23ca96e03a53f0447a8564bdb0f9519e2df5 Mon Sep 17 00:00:00 2001 From: shainaraskas <58563081+shainaraskas@users.noreply.github.com> Date: Wed, 8 May 2024 12:52:50 -0400 Subject: [PATCH 005/119] [DOCS] Add API example + diagrams to shard allocation awareness docs (#108390) --- .../high-availability/cluster-design.asciidoc | 18 ++++----- .../shard-allocation-awareness-one-rack.png | Bin 0 -> 25565 bytes .../shard-allocation-awareness-two-racks.png | Bin 0 -> 43058 bytes .../cluster/allocation_awareness.asciidoc | 37 ++++++++++++++---- 4 files changed, 38 insertions(+), 17 deletions(-) create mode 100644 docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png create mode 100644 docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png diff --git a/docs/reference/high-availability/cluster-design.asciidoc b/docs/reference/high-availability/cluster-design.asciidoc index 3f8e19b47d37..6c17a494f36a 100644 --- a/docs/reference/high-availability/cluster-design.asciidoc +++ b/docs/reference/high-availability/cluster-design.asciidoc @@ -7,14 +7,14 @@ nodes to take over their responsibilities, an {es} cluster can continue operating normally if some of its nodes are unavailable or disconnected. There is a limit to how small a resilient cluster can be. All {es} clusters -require: +require the following components to function: -- One <> node -- At least one node for each <>. 
-- At least one copy of every <>. +- One <> +- At least one node for each <> +- At least one copy of every <> A resilient cluster requires redundancy for every required cluster component. -This means a resilient cluster must have: +This means a resilient cluster must have the following components: - At least three master-eligible nodes - At least two nodes of each role @@ -375,11 +375,11 @@ The cluster will be resilient to the loss of any zone as long as: - There are at least two zones containing data nodes. - Every index that is not a <> has at least one replica of each shard, in addition to the primary. -- Shard allocation awareness is configured to avoid concentrating all copies of - a shard within a single zone. +- <> is configured to + avoid concentrating all copies of a shard within a single zone. - The cluster has at least three master-eligible nodes. At least two of these - nodes are not voting-only master-eligible nodes, and they are spread evenly - across at least three zones. + nodes are not <>, + and they are spread evenly across at least three zones. - Clients are configured to send their requests to nodes in more than one zone or are configured to use a load balancer that balances the requests across an appropriate set of nodes. 
The {ess-trial}[Elastic Cloud] service provides such diff --git a/docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png b/docs/reference/images/shard-allocation/shard-allocation-awareness-one-rack.png new file mode 100644 index 0000000000000000000000000000000000000000..d5a3040cc5343ea6e169e51bf9b6d2924f4e238a GIT binary patch literal 25565 zcmeFZ2T;>pyEquIpaSCiqJos5VnakgdI?1oRH`6dLQ{~A(i0#c0xDosl-?9+(u4@1 zg`kM2^cs2<0tASZ5FjMk1AKeG|J|LryEFg$?Y%R5GlP)i{LWL))6Vnw%s@|z^AO)5 z2n51;^Tstp2xR{%1hVJaf&JhUkBT40;MYOV8|GdR$R+LVAC@dwTpsw4#mi9ZDx|1Y zU>3aD>!_})4uO=Q*|%)>K_K>zZeCNrf9wZpvYOFNCb@P3xRbtWPWAFj zAHuJv>wVi_h)S2ms_bB$k+ingXkxy_JlImmz2C^Z&)Y?qy~(}pYH2uczn9lv)$W&D zTdtO9@E@d@gcYX`y~GvO_me+)$V9^3VT!Sqi{+O%l#oqh1ZA(96~;>ZJ!Z;0eZ?q2 zZlz`3LFI^>1Iz5YzKnUgF1~GRj@;#2{)&zn_hH?E-E*crGahjWf*G5gd+JrhSJ*gG z#9!5xp4h^0`gXts*_B!)tXTgUYS@NLwC{JRRYtbRxo5}QE)oeAEH(_HBgsld+7(^b zN(s71xvXRB2-_7z#3#`VJVMlf|JPi5TEjaLYfp-^NA)R!Pmi0a-@&d3NuH(H$;_>S zB30DXMs+6{P71s?opigt=5tY2|KN*zw(z-GQ?8x8LQWlA&F)=Z$cr~8N_+LpZM}8r zoa%{pP?h4*P(W&=vsJ?_i<94`J#1>ys#fm)5&S(SoG>yalQV+IdGRefr7FM&CP>0L zF(=q~?C<52p7aQ)mGJ6<9oR7)A#+l{r&pcVD$<7S+!KBd49q9yV^GKKJ|T$1iQK90 z`g9KU^GO}#it86dRBAxP!HyR@C ztctO3Noz6poYs2zpW|j7N;AB;ZQ*cXORD&y_10L&&Xy3~$7*OeAIXvaxtDq(AGZ$w zj5!90N)Tgf0j%V(!1*FSLrn)>r=&6MRK%qX`o(BV6C*q9K*BI*vheC1PRn0DI7XCg7%~M-mNdS_y z>#+Mhqs#4)rM^~7Q~vVd=B)%9YW@%VXOXM zoW;wIIu~-F&D&JV^Yz6my9X~%bl(jg>3?f0yAmPjD@AEd{@K)1SY^h?FkVqfG zo!>nZHj*ZeB0-xAGH>AB)MO6r5DS{@irDuf3Fu$Q>^wX}8r>e%CMl@R~o}4W3G{GTZ+)D|-Cs%>q5}n-R*LU`OD~w(iViAq>m*k%CplLJG?tD|J#m@c zWscHyF5GO78Rw?~qoGc>cfq!B?I3yqIHapHn@$^^wqGoc#~n*UmHQ7@1!-eYBW^dE zB`s@3*cYZT%wiGcPaYKN?nZql5Hq@kQ%KW21v2Ip<;6W3h4-f;vij|{XpxAGDLK1y zltq`%2Vc3iU5TRLOCO$O^hx?Jr;KaMcp~OLZ4HLNIsRPQ$#y?+9?@Gg;_fhO)oEGNK zCCVcgHd80wR^fgop}^Xg+GD=H_7!(Y(7ncWsY;XMlQh!BBD+@Barjem%%zlSoJ0z7 zsB_+}-$emA>HNp+uH&J7j*AzXTjP{0IoKjg6<3LFvzw1MPL{;sqU1o*#&UR=Q!)FrL(ze2?d#U$G7{*(;SNK#^=r}uJRY{s#Dw88Q5x15oeTf(t ztxl3T8rfHHJ*7(cAH;^Lp^Gd+%`jez&NID-)|@xAB81#AZh(bs)f9ww=s;})7yK=K 
z-t~F#7q}GVSCl;Qb+199eHSqNTm9yB0fXbaJN4N~v2dRw7=1dP|IXe^ituaqwC#TR zKmY#ka3G`9fwlZc=X^=fU#fXpDS6lDt1cxQyXd~6p`ygBF{q`gaNBrz61_RF+*07# zv+}^3Yf5sFR*JM#p3>2cxdayJN_GWm*2EY_^4I5lQ}cQ3+q zpmW}Fu0OJivp>>!4Yhf4N4G-q?lfgrUiy>lvM-QbWM~k-{-q&A_sC4islpL|^QYKA z`6vE6Y;SM5dx&a+TofC9Gwcz4<&wnb$_S|N3CSNWXyHJQZ~gPk;ePM6Ylu3w<2&nP zA5i$h9WiT5R)z26{O(gHOI<&5Ev-8Ay4QX}ErrSLi&o2MY`5S!rpt4^h+=*zhAj}e zcB*`aHDl*-o!B1jJ6Z4msdz1B`KrTQ33gjTA*T+k@;;!rh9%l}(B>cWT&S2JTGshO@=|`+sN*u2lO7v!PjOn5Z@W}eOq5z`j?DK| zU3y(soUZU0wWM*L7177tP8gZr^ZU*r(sF3p$=#SNqJ1t%E`1-8y@YRGPz0>Uqz`h; zXmG*yTO`rDzdRjow3q32xo0~fQ^nkIs&zAlXk22S&&s1qZAGo-wbzoBB#r}rBB*Dy zgDE6AdsuMyum9-Zknu|`jEyhAInj1~^@YOjIJ`715g}3LyU$2uaLMH_N3>ST=JcG~ z&ORBgaSp0J5@l0b%|q{CD8!CuZ|PUf^Mi|3(`SGYkK8pU7h-_B_juYDq--*O^#4&A5I{7w{#2)8Gn&gW^m zn?_2h4(wtykdk)Y@|#v?e#WvG*v!3M+oK9V(}pP`i82_cQ#-Ii{vJ$@7-^<<4(7y< zc>gt{`*xyiCC-DOv@4-sZp;ap^VO72_Us5Xk6V{oMr8R(MX5P{(4D7sA}F|3z#1d2 z?8j;z&X@4#QR1|E@tIV%ov&kU%i`ffBMN@;aD>s-47q(4Jl|8A+LZX4u)7N+)Z=*A zsiZ=VkmuYpy5Z^F>{8o7p3B~y$0Z~!+-QRJ?wZP`To?5-9 zwzucolMhq21nb>))#Jm(!__Cq2f<>~hlM3xE*k+6gFAbL+$^JB^FCZ{?j?wun{(kbu3ceCe2F73)ZnX+-$??WJE!_Z4h{X-K2z0QfH3^HlTj`CSc3upudu zyC2`$M&eQ73gblRE-UNy{xxH4t;K}{$jJO-shQQlrpxyBJ2XQwzU@Wq{$*gt`i~6h zg5*D*hrCHp+m#vQmQKbldm(4fweM~f@;CR)F12dU{;Rih)rZ(W+*)4>a%@e>Yin-q zj*E{MZ*u7ATv#>p#RC)Pnpm)?nAmf-Wq$gntFr@EsEOe*-;W`Fd#4epxBqPplnv6S zF!!(zA*G%r)-qC+g2$tlEKn=m?n;<+d|pmlMTq@rr)YJgyTk1efq ze`r|YI8T>P_gq-opz{qgf7;+^u#v&Ve;^)Nc)mRhx>!j}tDMIR(~BB9oTV#Z$;Yk6 zjzKa~EHtEBvs?-}a~G2OQF8Y=53+xGVT9fTxf6yE_^y$`wyzN_RGg~dHEp&kk32K_ z;7_YxEe05PooHYCtG+99Dp6vKBRHC7J!77OA`emX$!oNo+>3bWMIH7VeHrww*^j^V zyo^8!47nBuegu4`#UXX~wBL2icI6NOMkc>b)gY`Dvfbfr=d-YF0D z;gD3BDnCGjW&XJug=&Stsc~*|UH`X9o-W%0F+kVbfF~?A-^&AGEZJcfd(G?Qa4V_g zZA+>Xg9N+&CKVu9E^&*R2M zQcpla+`FH(4;O`qbd$7Cb^WM48r%`j3aMMYRc7;#sf*oDJ+JUS11t);o!f}) zVyIeiRGJ5Qg*$y5_G*L4X}w!I=@s=I07W?UJjWh&5K;!rcx7`2hybFQ1tq&&20F_b z3N;dTQBJUg!xwqKIyyKwbbJDJBaY6GC-^r4^2)aW(4shj*V|xX4AEg~XpxR8gSnQC 
zfWneEANBnZn+rfqYcKY^+z4=*NjE9HDYU)6tTwFmGxv<-vDOp?ua)KH<=Qp+d>G>% z>1$q50N|Q|;X;D*k9mjMh@tqNq{hd!-IL&a$01Uj&bMbScebnaqc9r*j%{f;5gO|Q zU~;2R_PjLb^)a&b#Tk2bhHsd&*(<0ACwaU=?lT)=?i&ru16hm z-UYAi8{XCjUVe&v*R2e#tZl|<0kZ-ez&6&B5Qq(TY0?!d+CuodV8fPx&DGg@b+&D( zSgONXrT?C}ZC)7lu62rdUuZsJ4JbpGp>ASN9QHS{rMY=%tYA!Lo7nQ_0B=y~Ds4@J zS+=EfrUYfkQGsDP0)a^EH+M^n*i=YNE^9OY;<7{KS%8dYhSpo3yZOQ{1{K5ob$~o= z6U(%DUlY&vY+xd6nsOTxWm#btXBVrN`C5F#DMr z`c_h6Vu1cQ<1r=_kc#y-@mw2S0Qz;v1gPOBSofq3cPR92?B&3!ni}Q!^!NxWsATl9 znlDtEB^c|=IO&LN6FhhmNbJbWZG~;?OC$sPE4|HAS>Qm+)$)7id8c#CV)eNH-O`5f zper#DNJydae5eUpWZSygu;n)9DThLh@3f6d)&RA&ENqfcB|n=rZX4Y`7N2?mK6?N@ zomFrxG1ay|K-pZ&fZa%W+G-o@+~q%fcpHS$Hv0iKB02i$pnTE;ynCNrEF>Ebc zHBL3|k=q)OngMm>JFuFYgcxUp0XH@{IkJ^%r(Y&=TYSiL8<3B*m|IL*RVqd$J*a%w0teF&=J}p^D0cQ=fB$T{wGHTuKETKn<97P=Umt|IlzO~n->u@6{BaBv`>j15oqtJID1Kp=I# zhUh(N;RjZM${RFgBNo@b02!BSivd|1h_J#!midVi=g`x=rtE!+=P`NxQ?ih{55P=4 z+g>1vWQEvl8D@TYXXcix*ii)zhbtG*f;-V5KL(|9OlR@~-(WM&&aIFuCx^kKd)V5Mr4DC^I)~`WVvG3LCSVCg9gKLpw;c*oy zo7Z`betLk876UZZ!dfg4dpQ62+!DYAYq3pT5Xe05XuwR08(wDj57kWee}1Do>Z+?< z7*NKmSN#L-TuAe`g?UNVz)1-2ZxoJ|ffo&b@RJoi_6tGcwMDDUp%9|bi!%Iuq;tMc zKmkl}e#zo_Q_mx`5V@qNzQbA!JKxb;6O2NullXHoOBjBITEyCyOdL&wp*I@cieJbb z?Ef-@bBp#$hW+h=x86KKvm^Noy3^c{^)uAOhi7Do)4eW5jdLmrLQ2FJW=Qk^Dj(rJ z>VQ|Uu?j2iUquq^{MjV#m$B#PykEbu7>pG; zCd|fGfH|JZ<4Tqp76|K!Fe8-kGVE3;y<~}*GAqGBmv-7n+6yf*Ye4W1pQAa5c%L4* zilU8~Vv}{NPP!JYz(cmY?dl@2*UpDFPxYHQyC78WYLEWzd3}phqd-(wYqP-q@_uH_ z3)iu-nEF5t#;~(w&1Y1gSKa0^T9q$T0b1|DZYjpJaK2sW=Bx6nx9d4qh1vfgXb{bY0vKW==7%Um8(wl}8e)ewNQEP4R9B|An6T8=CYDvm3>-F+k zEMc=pMrb5Nm9bXRl`faNlbdiw-qp~2{7Q^fZ){rErDml#OAjpZnzxYRY5GV5zNw|8 z*Z&p6SD)fP^2xcQvM}X=?t<-=&Qa36`RLW^a(?knmj~YlWaHt3MzFMaC|ns2)6Jyw7}t`XC?MZ>xfF37>FDwDe)uS- zr3f<(9J53V4l4Y2{4AC!7&Bg*yi6jM1wo;YtmYxG)$Yl6Q2l8Q*hmy2@YB|s>Ztt* z%DQJKBL_;>hZRA|SQX0p-cAklMK4ZRZkj{Eao_VuWnb;ZU|iFcGBc$gH=BeOSG<-T zwFGBNhI%Ro<*rjPxXzkRnO|KN=ljq3dEfrFh}GHk$cc*CvSx?Zi& z2YCvSo?3gvrC3VL8a;jt+rPTe5xrm()k>}mhKVhYdez_c5H8FoFyw?>(qnOaC#K)8 
z(8(04&I*6wASJhzdhpJ5sqV>@chFHipd#z?d5t}jmW9XX9)@uKHKR_E-mjb{?s|D~ zdHwF9??qYt!|UcPy$gr})l5;;%al0Frz1&{X0`s+PIaYNqPtLZVNZ0=dsSVz!)CIo z*R)QwW(Y>A<5%=z>jOS|^K(#rfPi?|_Wq-*ILa(0Ht#{`)c2T}DSIF!lW7-`n zugIFYnQS(@eV>^IlrB(fe`90fDQ4$Jd1W6YyWvNqbvtE~J(O>15T$9C2A`G@OwnH) zFUxHz`(Ei~sL&7v`F%4Z`nBp0^6|uw4uE|TR0t2eP16mSN+N*LGg+U^PFcSITo3Tj{$s=OM0YNu?yQOYpqF>ht&df$+g*Kiwt^}tS%e;ePl3XIw9>v4WoFhV;Q~FK zPZwaZ9!ITXmhu_~d~OUxi1ZCMeV`X9*z`B>E?5W}yDQ%~NB8T0su`^>ra9`cl8!MR zaaaLzs9_+1&&tzcMiiPQ^BkP_A8;itWU+O1fgk)h@m~cX@c&-Pr16z zdzd#3L#gV9iY`)keGacra!yHKa_34a-F&;-n72O=jGKl>f4Y^&RSyf;ZE(ZVl-$d{ zE|dnhu~_QEY8AZRL5rUwavS{&rFMK(eeRUIT%;KscDViYHX{3e|2!2Zd|M zwy*w;Cnnv7@}-8#Qr5rcHs)W4)k=Xid5w{v*3x$}6xyzT(LwKUNwcqZz1XGbPg}my zk3L+j7^xb5+EdcYZxxq}?zx)23_`6vB@P8IqVPvZ*i3}1AbQ-jA^0_1UvXWr*MjL< z;BG0`@VoP5rKA_I#My2(lCszq_wWf>B_5Yzznt#($}tBv@PubGHRl>HwC($N9j2m8 z@%L#CM+D~NipV-#?w;f~W~a#4weLl!-M(nyTsT-SeX{#dTT7FwABOPp!L)l$l$Q9l zfDcmh(-zp|62-%?SkckCSVmk+Ujnc=i~FB!Q4V)5tf`L&7QlrZCC?d2`5{pf1%-VM zE|TyKH`&Te3Nx@k_K=(%?I@nOKX1z2V=5G}BxQdp#3>6Zl!~aa=$=%{Nvtk}3Z)?e zrMJ)Jz%fWFv}rg^&~9U^Av!s{kx#fNKi#3=nCWat&=A|bm>DHqqWzNH zrhBW62VI@VetBU0zOA`qxTf9Fn=r!D!mjAD1^n@dtx%_IsE`%R&l0B64=~I>F$Zkm zRhY^}z^?rVS3}-!B|b6bGAlpsGKrVVq@DN@)4OozKu;#KYJVln51C;5NY)~7A*v%y z=~rp8f)qla;rAKolhK_i!PhJ^h!xNIhzoqd~Rh- zcyy){%9&CIj=fT|p{D5bb4wjS-MW4ZgJc$5U$7mOu`8^&wa>PgHQVY>*>JagWuW8$S*Ib{ z{!?a&FC;^jxP(ny$UL4u*q2%8)Ezc@<)_@C?9t3#`%3SOKcdf)CJzZ&@IwUXlZD!PKG%~3C&1y?dI6$=Wq$xdPVj+%jRsI= zxmmRjR&~vdbD=kIKKP&`$>L^);}z}pR!+g0K1yQg&r5)d^Cnu5l48Cjb(q(+5aENh zLoQ0MkwKXPyv>?Kmd!v8zjEVE`XJe~kb(g#o$+bjFCSI>sj=-%{mpN?__USkQC>Du z$=iB$(HCJQhlSgV^%lV%$@^;J{$RQm7F0fIqZ+r&Yy3XLwQw>m4Rf+7(?n)4jaE>M zYboc_T*jMFas zTNNq9%|UwYP+J1}=Uc`|%n*G|)&$+^=KBsHh3#J>n&aJkGue7)9j%VJ49?$m1<@5V zO`A-}AXjR-w#i5*m6VVxZSndcKi@&EE%AszTDJx)Ef;viOart>Gzit7|KCFOw(O~1 zZ&#hB?8m=5hDveScM=X?yz8t{KuWH>4;B1WyKxF`niOaEc~5)gWl}EZ#t?!;q#$9l zRErs10LOvOkB%sr`5ldbjMl7n?%gR9UyZG<&kY`=A8bk4BDX4z5Y5e%tKq zE?#oDj2+r~*_7oZ#Wql%gdPvZPk0wJZboeIe`t)itDxd%Ui7Uqq}&_rS3oirG3dZ! 
zhs5@8DNfTa`N_*-M?JbLYKhCF-0P^u$h*EO5lCj2iUHTH`_D-o$L5x5M?Eqx=R<`S zF=d4yUuiJ}Das(wn>=61gy5W|w3xoe;UTFQNfH?$=e*Lz^A1NkIN{S)*RQ%WkaqG5 zQZO35miqnA^n0(n>l0O-$Z~GS_|&JZcvE2Q7vyH>?w_Hm0;!IV3GcBRGq|AN1O?Yj zLsj;U4Jzz-tlX9s!n%vrLz0?|>z&M;9}U2ARj4{8mB9!-qtRUToKrTWZ5-YsV$8zWG_6Zr@HVeQNJxBs!5XejFo;p0fC7QhqiVKCSqc#4$ z4WKB0cyN8a+Y6g4xfSYjTTuemR~1xRmez9AVS`desO$)PUcPybAhkGJLZ}k{PQ~`k zSAEZePHrf@(1!Ub;j&v8V%SGC-rhi0*9&ho3!pk~IfV$kbD@&}s_Tl#BB8v09NUjr z^p*Ox6wqhVHhj5@C~T#JgnfjL8d8Q9KnP?1gYaS7W7h#kzci_z-4|94C<48Fjb5SZd|Sc)3Hm(f6KeV|KytGf4tqo z!H|PXu^z*6tEJ)}t&U3(YcI*#ZS?0%Y~X46^7LXRDuof>Xh#STj~Z6>dRSAj_T7Jp zGu#Sih?)v!S2N<=Z^y zvsl|_R4ttmU7kPkfH0XSt1q*WmX4Fp|B1TX`g3k+RrO5=R>2 zt+*|^h8XG``=8*|EY%l|WcuB<`-c8YouljYcZ7q3_eH7b?guZ|VvPEkxIVcHm_i0) zX1dXO(|>EG2{lA1F{Sro?XYTG2{xAW$|PsBQ1E7$Xz2Au#K@ms{{|sCHRQ^`9DSnE z9L89+N^iYd6AO#KNkYteY=zpOJ1eI)*|aQF_IXmM@S`5VRMANACV(2e^h`cYx9vW* zI$$H8`rWdWVzm?+wa3Qr#3RgRQux7a%|k+Qr4b>hjTvU?-@TNTuIbU@O9b&|ZNto~ z7DOW~>Ws~6&T5o$5HY9p`!hcZ=gRA~+NChpes4{fPU-Dh)hcrZ^$%JJY!WdMbins( zA2CJ?w>efaoFj(CJMEnkg@QIFgX(L}2(|^2{TMl+jb#SDXw_x!z(so|uU66KAP-`I zvi#dG9j%IEysKEjph2zkwU9$o*8K?c^EwF*518qEthnccc>jg78zMG`=$|**Z)K_# zzdR*)l0Js7aT)t^8-7UP3F3}Ao-WJ@Q=0Rw9gtW`XZ>i?PSp~hRe%%_$X-4&PyOk) zQaQWs8Q@+)wVR?(?+edVVf!@`DQY_$7PP10=rrurzL3l!jP*KO>@^zGTk`igiH_C) z4WdtVr_A2xgn=8KCu;RaHyj^+DQ14DYV2edCQ;#WnN-VbDnqG3!wlwc0KfmC|FDr33PG=duOvELQJF8`FQzp(mD{6(z~X zv^Z{S2MGFi)xMy$!GaW_DE}adc-9KM@@DG#f__c#u)SZOxvwvPonGYNqx) zq0lc2JHlj`Z4HbQ_Sc>HYlek;;TX!BP!%v6GR-Du)gcq2#QZfnTe0r1$XzkW(pgLL zh+26>xN?KdB#*~>=~AP2h)?TZ@Agf7PNWN4RcBXte2^eJGJiASPVx_wRN1M9f2caW zkXP}KRYIZn%lpY_cSAI}`vRv#jO=+rc~&7-+i>yR!KwN8CzIe4m@O!lwRGJ5F^bA$>6i>LXrhQz%0F*?T1E z`Yd07>2y(S{9?}3;>AmY>tDGaPI*`jmFR_(O8q`(pfDRUbxdpVxfRhzwkA6)dBUeI z%`Gr^ctw0YW|Vwj<@M(x@3A{~9WAWqp6!jb>!W85%B-wep`W;$) zZVN}I65$M?-WFB2ZpgXjCTpDB2s7Q9AahR}wy?~o^;E5tKnANcyJ)XGhq1h zaud`WE{X`Y*L-LS)vJxPn!DK{7A+bPu;JsYbmLb2!>?yfI52O#`vweIg+1coLTM36 zJLWHB%7*rJ4h+g0RH9Wa<)avnFU^;GK)T%f+IqbI{?=5)h2&bhIN>FN57*|u|_4)5p~(dFlOLoir$fp 
zj(Ek$0UdQX-|OBZDUdK#)3yyi{OcaU^cls!A$NK*=%@F26F*G0o$^tt^|AXa_9CU5 zbe1`YKUMQHn0ZBNjrwX~4Kj@B~pmE+-X z=+Vt%$f>M7xx=imb9D}DXm=7~8PAxm@;NA#aEHHNHEpYyyik!|D}4FLgFOYjj>9a* zCH?f%sRpLXiMe9?0)WESx*||&{Xw+Q*4yyT_INH;&nt^~FS^gd?J=w3S1bPP->8A^+-KtxkF~9yI>0p4h|F_-^#*49gkdTr87t6tnFM zPM6|?30G#vW`Mo0@GG~+LXS4JDIW_K$H-sKz;a4Zk>w`BFG+YT1AJS(T>GBJj3n$H$(Px^3tSirrj#zKL4Y}#P zxDPpD1^Zg#PG~|d&Zd7qsUJ-3-t@7`2uS&vO-EI&fBRTl`87yM+Q^;|K%1x$RXxgc zHz0`c^4;K(Gs9o2YV6P-deSEc0(8>UXo^+|!Zk`I_S9C6jYXX^gQ z8v3dA#JJ#Zl&v#J?w00_Qhp5cMthNNa650pHuAZAILBoIsC<_o+S+?;D-E_%WhG8t zicEi|US)+Oe>F|#mKdKtL@i^Qf|j?02jXHJr>=M{Hkq)N^H>GW{tDGq9p-~q@AD1F z!Kh_EXm1NmEls3n&g8VsY%)vR z3W{n%0z5)UC_mo?sRyHGM%jbhT)n5vp%J;Iq+PFCHEoKpown z=Tc{SsO>@eMal8xvj-#&`tGZHbBj%R_;3WmszRAlF5&RU=WC2kJXh{VZ?VI+pe50@ z*xS%bkujf875#U6?}C}kO%3GNGdI-lmiSp6nqOoNP^&MaW~X?HlKr;W^{tKlKDP|ywzRx?$!X|u&5%D)5E9b%tg@!cJ1_yu__61t4NK6M(M#7 zjB30t50{LO%e5xe!;p4eEVq`?1L~wWed$;3l>pU^-qCO9+SMhWCVYi&-neWc?*bfg zUF2yKS402Ww5WCEWr$9_d+?$?mtQkd7P&}NT|@=(h@}E_;jCa|8fq$X!nnVb$nP9`kXoy{6%M?pIPN?i={&sEoi84aAh)F% zaj>qmw#3nT_^qq|o<3HLW0MdGM6hsAf)tnAQbh2=LiHxMs1aY)@0^xMIIpiKwQcKe z%{s+HaHIJ~-{r}X?ZhWYl>5#X6BKGSwg`)DqnLy~?|;$Oa|cQOO_Ymv&c_m@109m$ z(qSi5of!0AHB!vYzF!Yr72!Sww-SSv`7^C7>2=9)*<~-owpwWzDJ0-Ii~ME7(P z=B2osN(*uP2JEIxs^{jW%w2aQ}F8H?5oiW}WAR*7TUFLT8_q6{* z&0VBAD_ULos=J5)i z@D${i?<#X?{=XCb7aMbwrs-SB@a>9HrS)#1Cp{faOjF?oUc&G0y2sl0P`QPus8R|m zq^C@f@*QJNr!$AC_mnq=iI~~$TZONBSCaM+8(lCKftfNKu4M}!8>6ZOvFB(Y#RE~Y z0t@6$7@T42d{m(tL>&_Y5 z#Sfoh@d0?64By?2(5OiGd5DC0dFjXv^$}|*T~GNKxJPusEkX%^Q zSbb9lQJhX=00UoPV09Ss*6xRH#xfN3dv<-OksxdQI+E@E(Yc3b5X9UlxtNr4DuFOoI3m zj=ifmc9W&S12sVrZ${3HoCgnYa z3hSpREbM>cJ?68x(Hf9}(CimCjXV7MGIB6{Ddw`q z>d~yUYa#^_*G?MC%?B^#M_N}e2EYg+5H`r8h&;JCd&-llnr{B$cy>$%`BlhIxnE$z zoy3{}Kz;PrKJL$h++upNWI~ z46VAQGupT>KqnJu?*(WT_2+J^r|meH^R5I;L&&rugef|Rnc7z9jpBge4NZcLT}&0~ z14;LIu|i@5PG6(@&?+Q59$YWHUoNb*s5?sX9ZTN|9N*-Z(3Qnle{ksuoSIQB90}g^ zR$ltkB}!^?v5xv1k^rA9s%MrOqkdE*1WyoiebwzX|ori^VUK_2BhU@ z18Jc-iim>@OzsJR;w!K~zfGG*i@O#CTLo9KIjp(i+DW 
zGW$u;$DhyPlYh`B5Y~)7fV>&0SVJT6>))#2{(2eU5-PKdGxv#Q5z2GszBsB_Dj@Oj zAkJDjS;jzl>4%gkCyz(X)>jDRqeng|=IFE!DRFfWwHy&r%HPO%g&+ztzj6ai$=%Z3 z=O7YRAYZ?8lfWx~?*Vj1q6nn_GAY^K`{~=lTraCnQy&{BkTvlm$isQF+c@)bbWwIDAZ2Sddo4OYRJ_yVUR4c<<%(w z(K&w%VsMb{5f7I>VI8qGo}&cCgR%#x+_d@v7LFlTs;S?v7b~f%ED142Uo`Sr`=2h_ z65~7!F}OcjB-eRlB%MBkop{Bj=iBTN8NeebFK%P#3hHJWZpR~&bu(=$e9u61#Kl~J zu-3ccYdw`He=CASbFjnmg3N&r@N2lsR}LG0P6cDxrQgfCZEm{;B0=#~MiHVG9)lUV zVGHMj|9PgRsDYOLmXQ{q^S)(X2xU3*F6qNx1|1e-JAv%G95JTw`;zv2vM!$F7+ zI;ML(@5K$+%JcKA2Ul|W8uK7^4j>D;HRrQtU$tMoSdI0g`y2Yi*MjkazP^`n8$--B z&)yl|kM_aNPiRr-z@d%?q^@SaYG-055PSnzH2QTz3V)WdQJFF`RzKBOr*G5upMdkzDPj4@8xId$X2p_r*63 zu{p$P>G7elOClNfxK`M7SSajnVxhiU+nE!-%o|NT)lX2v+oxy(lz`?FsK6ePEP6r9 zH;8xMzWo|u-E`RQ`ExdX34oZmVc1vTtVvaA(lriz&|K4m_}0A)pu@7j7D&0`mVj63 znbHy5=7J{Z(7C1Uc;N!moQ)&<=O@~A8pi~af~U8Ma&i&Z4$O9Lbtj8k zkqe-lgU)o^oF#0#R zkYeACd~9gEk9PkbfdB3TiT|?PXeV5*_NeqY7g97Y1QKf_4>gZ9E=b5yr^dg{YI5P` zT)^NHcp1pF*f{UzEoJe$lcfLRDT;sIE+Ze``d`b9JNVeVcVebrX2s1ViwzlpMwLjEs(Ole?Yf;31TuC!aQ)A_zEFeezrIX>`Ju(<{wW`0 z!IaMkm#JaoRVlE-_h_u?aIVl2m}Bz0uB?}J6U;H?)sNk)78ErJo#p%=yM6usSVmG02|Q=Gy6hP?!WQh4_C($bOx;P-M_geX5h|M}Ze zJ4hI~R2Vk?W8vZ`9%aINtY%}pK5_xfJDpWjRCMMfc9w4Pdb8 zk|hAR;-B*Tb$jNc{WM<={hX0YdX@VjA=iJFILt{rIE;!Y<|!NVtpA3j#ty9zX(@44z9S zi>wU^x?^h4+d>*V1qClfK`6 zTiOZn{wY;J$&N2OHYh!`}mPB}Wz^8QkA6Hkp1YG|8a#^Ud|DP+v zH5a9^!5dTYJb+UB-wBdF(4*8@|NBnqvpbq3vLp|5O$s1{oSnW`Ar^E*`KHi;Hc$c6 z{83`};skrk`O(0YmROr}+XOhAg+{6zggH+!=(GUcDOEYXm_<+IrdprW6<$cl`!6@f zaO33y+S3VF5)!vn!u)CDI7ZXJDQ7_lSx+gfC)W^{9`5Oxw%OC;Fs>CjTR(Pt@d23Y za~5rgN)DqBThuR9uGqM_Ez|}nP77}fLSSj#dzDFuI)UgmtQ{k=qaNP#?9>xv$4?)Q z03A_r`+sIzYY@M5KHWYRFZ=~G0i()`NY{R3J^?I3!4wrs_e(+4T55u5YY!KdFSH7D z1$^?~rses!Nl#dpxM=H}!Jo@d(O*^<7XDIg)n+JD)WMLlTUdgp>Y>aKHoax2pxxK0 zBS33ULh&c)Ov*${p**U+3rSmBX!2h1?{4FZ`93l*FyJBt1k&@GI$cpw@k4Sf7G8{1 zn0RTLw_?+l=H7H3Py@`)df+xSa@dJ2^|tAF>)?J}Ap!BuVNoGySXRhk?=uicLG4XJ zHjyMns(@;m*xSm|6)hQJ4|H-yL}zAZ zn%p`Nl)ShImtvJC6%-e%zLu`y2Ly-A{f!onV=xg*( 
zzazXm+1hSa1*Iz<)9bWvZ3RqcfN^=BJ&=&BjSXUY^2S<}cx|8Fl36YD{__Tl4Mv4t znCqvMw)~iwnE18n3)_}hZR1%ZIpW_?YxvQ`_`vnnM2Ghj2c&MU{oyCYZBx}F3{6k} z=OVy=w~;2{_*4+JWc}6)dvj#&!}71cWAbl;WBIA-+hkN06A!PcJVxX~{2%RHdpMNa z8vjzUbvAO#)^@278@b2GV5eNFA*B#9QjsJ^n8t0K*pV<>a>@O&qY~4&-zM5FQ|^r1 zhlbq3$dJL9nX`tT=lr+NALqYw_WQ^8tZ%J%eQT|Et@mBOcYW)5G4#zYpzTF|l^I4+ zG|69n1fd2n>TzO)#4~6Eu=b_%u!UT0esi#CvMmo()8(m(f5+EF&E9ekAMn$|3$Sk1 zc;%Pqcbw>fpms&H$mbC;^wh}RSDd7!d5srp+(BTXNY^&xSC#Q-<0|oRq5RHluwPYL zfY1KQn&o`Ium!UI3wn{fqf8Fl%EUGOfL`l>MOh+z$!W4`aOe76Lo}I#RP@{_hMp!x z>_n>o(I)%wNr z>PHlds;aLzx6O+oCrjU9c0u-D6JM7dQ1@0^sLcaPS@TOT{do66Ve{z~*ad-Puyw`R z%n2k)+=B<8qe(h^oE8r%q1hJ>3wY8Wu_D%n51$!|nOmJ!>Zqz7ee~6}NuL4qLwSEB z4lRMYANMY+d3|8^DuNNXTVpqvOV5al#Ds&aWGp?S5aytnw>o5PO!&3 zT?}0{P(BbsGxcbm8Q_P)0{2Ib7rCuwjX1|(W9g_3#=0jW^w_R2VSt!iP;`ktQ>=am zau_-GsV<0+e#N8XUA4O;Kk?2+`3LFUFDBGclidH01n-xwQACnlCMg<$R8vME+ zI@{EhOFb-D-&r#8^^RV#QN3LccF}0Dc_1cK zq{PqAv z=}Uxd1x!7M8=Sg1+MC%7V+rko+D`-2dq8gKx#pHaxp+eIR<)IOah!dKG&xApu3n?^ zy5hM%OR#PdPc-jw~3_y?rl0u`aKXLt5MZrTuVA@XH2!EE`c+CDk#0J)mtgFZVvp4Y3BxPe9g#%3(uMjpnuUuwcdFw|qDr zDCL=g-YypcJ$Ye5q7QzgT%4>DAfMi{Py@`=H_%2_db@#A+0@hpVAk3ZTp{5TS_@4^ zLY)>5RasS9k4Fz3pIjRx2|{67vk|a5RWpSZdQhnZ)DUYAeAcAv>WSWyD-xT*Y34r| zXG{>9)u)jy3kL{J-DNYhmK)f(xwS*_QhOj5nV`dhvH=pP^%pnB<}U$+>T_KLta7QW zq%D1!+iggfam4cA-JDDYXE0{y?G3CUHGO5h!WwJ5aacAhNDpi`)lJl7N`O@8RV{Bl zD6D$B`2tZrM|y#FZjm_IGY%)wRqYPI13PqyHxzg3uK{LBT6QlYi-D-f;aB*1#M&VLr@ zU0Q3A8bM5z`E2^kQhU{VI?(#G&-j`%qE;wq#r!TD5|0uNdE#5#rAg^Am z+dI5M-Nu77Yj`r#4Er+6LoC60BN(IOvywWQ)#XFWNMWOmQ>b-dFmbS84C=ns^kVES zjw8aV%x#0H&pm>$&%I*K>)!Db@pBg~g>;9se)B;~B2jb3i}E!+%WTh5%YCB$S(Rz@ zemC&9>QWCXs*zOP0o$UabDXuIu$M)9fooXPBWr2!IN=ier_$J`9i%bb+zCCmkB`_Z z`T%R%3jnRy4XJcKNrQZ9=_Oi>+b&!L?F0Xi;k3G+*4R_^$tL*kNDBywo1tkvrpuAYdnyjU6_EXWwo!E45 zdI6GaGqd&w5ihv(Dqpg%#bGB6Jy-bmkp?9WA1|}-z z*kOnXCPc~Y{Dyswo`M5v#Ryu?Z2-x!m%3M*)BRyx76if<@N% zX|TOK@B59RcoLimTpsPG=G6(ipu)-!t?^JEo_u_Ob>-h6kL&^vfyv&$WvF`A9 z5SHE2>EqSmn^t}($L=4oRyo%cx9fym?mJlQ)YP1oO_yK2O+{y{W>M!V(&X|#bF~qI 
z6m(}tyq)i|maMEXB7w($AL#UQdiLCX$a1^X{+M0T2>cD!+0bJ4<7ss@XYteks}UyG z#Rc?qj4Q{QA>e_Jx`hCarhY0TLrJw(av7)iy<1`B878OD^Ix$-;SRrd7bZGiOLCqh zT-lMf^!j0`*&$kPp8Xy>T__?DP~o?q@T%r~Gh^og6Dx)4ZdXSw&OZ zZrfTbd==Zv6s%w3<+2#ay3hGX9@MqD=xw~s6DD%IPD#hlM$23Mg*ZPkRkFQT zrMPh9gcs+=+|Kxv;>l|`9NCiTTS#2_Vqt=RcyXG2NDW^Uv=cP(*AF@ef7_&HHP7GD>^(!VPM)3oWi>KfrF9466d!^yv?DJa59S- zS~GHXO4W6d-B0DD?}<5_L$c?tsVFQvm8PrC`Gb%w5`l+d?^Tl5>@ zuX-pMhbFQ&al8zjAK%I)KfTlHDiuh^Bs53xFl_SZax`LM7pT2C2?fxi%oCiEJ`bq}Yl&AwCp z!m{@$9!jWk!^;mW@H>^-C3>#O7Z0g?I3C0^uyA=Q)$oz$+F9R@%!cE&Pvz+;ZDtJu zKR(C}cd{BWE|*gqE|gi{9!Uew^QNnL37cKWq1%C}n*E~}w2AWQtb6FAO}T4T9S4HO0D5`> wJ*%7ec>G9zt;qfDoWCD`OW;39U^E}Q`%)!L_VC83?Van4snzLX6PE}70;e@1YXATM literal 0 HcmV?d00001 diff --git a/docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png b/docs/reference/images/shard-allocation/shard-allocation-awareness-two-racks.png new file mode 100644 index 0000000000000000000000000000000000000000..ce2ce6b2a95e9542ab3d8bf1ce6e0f51a8dca4d5 GIT binary patch literal 43058 zcmeFZ2UL?=w>BDdD_cQSlqLvU5k-2DqBJWCK~Rw1yL4%xmn|Y9B`Od?XN!Q;05Nn3 zA`t1)2@p^sASECmfe=XU3heLR@7!_6x#z$CIb)o2$G7&_IP$J{wK?aSbFR6b`9?p` zzI*KGnWGR0IgBsC!A#V zM*l=#nm*f;6E4%^>6^5&);}-&@#fTfjK){U9AW;WkR!g`LA~kR)eC+pnd9RS3%=}> z;8ES-rVpG)Ui~59d@dRjuKu`#s#9NsZmU%~=;C4U$jU>NFUTa?9!l2oUL-jb#SHBT z%eT9~*`P8Fnd^%+ioLnB7O+^b86pT~`mgdY3jF6$z$%#Ysb+QMPTD0wcd?}LZA8(f z;a=A^CUeuqbd{c>OEHT&#bMPa1xH1zlJ4V{KhqpLOOh+nQt^YIJ3J7gbHt4vxri(e zSe6MHHC#g;osHh0Q=Iv}&_-(Ca|;l?Y;kDg{g$P=_NYU~tFwyb@@{W`K2$H)Y7r zE=)d1EmRXBuAJ39dn&$Lj442bO*klIVBMGypjuaX2A_2>`N-bTe6i1X8V2>AibI#u ztywMl1AXRwUI%es)l*RuxQscU-$IPoBjydd7k{G-r<6J`# zdszA89CRm_o_uhxb46VfWO=W)UqlH@D^;l z1B^yZI|I$JBcu-%ni@Hz&^~J!;M{+C!Qq5~PZXBiBa|0!o%%#94zW&lU@Fi3>HUBm?^ZmvWS88+;@$ z=g>%_o$;GD;Z9)!`+LZ$6p-&mY6Nc8WC?$kSW4cFSv0h%%^2rNJGR#`($NIjch^}T z*4cV$(J>GfQ#}x8TO?TUfum$@jG)9RX4YLRA~tSByN3>#{maSimO4_-4IEkg+99NnA} ziRZ2Kaib{I`KBm3l&~15w!O=zi(Ohf7ckag3=EMm)Wt zGMHjRe`g#@jWD`bGH|92S|BunnZflFXG{=;d*f!}dSifnpWnFP_(^6#>$eWFhb6-- 
zW3G@*$Xk~K{N??EcE{F;bK1pWKNvm*DBgEjWY-K>I*t1mhP)f(-Tmr};C(*OYL&j4 z>Wpepyd1F{O$r&yKpd`m6Io^$5B^}|u*GH3ef)vqLiRk0z*NaSe zqfclPgkHl*bY4OG=5Onm9_Ls_f54iy4F`Iry%;%K(nfY!nbVvbi@Y>_y>b)%>hXAB zMesD+UROwtTDx~&5i!PIBAEWM`>eU{XL=p!y-QQ%(Rs0&jcr{-h+Dn=?x#5JuVZVr zPHEedsUZXz`HJr&ry`g64Z-frBy|o8Dn&Qgq+Fh_{Ap+HTS+jGrWkZ+kGz8CuM*ax zYfZ>w??$Fy?~MnUea`9NGdkZ`=37t}m^*sZHDAuKC?}@5gb%i``!dr75kv$NCgt>gCajQf3(p2J!t9p~Lc@79U7mOE8F5>L(^Hk13pV_49A@I2>Ahr!U8-glagF0p0% zQ~q>#sd!kh5cP#we9F)@UmLIo6ti7A;SJnT>#FykZcYY;{V_?!hpcc34o_Clex@j9 zFEvwfON7P(atmO4=(f7%!ULd$XrPuPcaN4yYVhEA~BC8Fl)q z|H!shymS|L$E)ZHLa13*%xDKqpfj(eX#TL}?~I;*YhoSa5%3XJQfnMtPW6;Fjjww< zvE|ggR8JXWSoq)bUOwZ=wYNMVO8T?UB}5*&Tdw*#M5CDZ>j_5la1lIDvoJ=)v|4F9 z3+j=_=?F|~J*U>8BeI9bob<_K34KaRw7@rF)F%pi<`5zxk*usl2r7!)c%hKqxApyp z2J&GC(B|?sVa;MxNR`B$39r5=9!BR7^7wr7>PCyPc-Xj2%a|!}4o>l1JtC`#r(Bm* z7EmH0G!pmi&k3(+^g2nud&!JiaB5M&{qSAw=GcCQ(mtuBT+nd#G~mgZ|LyL-fom~G`?nM4vh~?O&`c&J%ZLW zQOq)ZZy)tBVt;yCIfIakBOJu$0CU;+tY^lpW~QOaLY>YTE>--08~US|9^hMhi#ne~ zWwNwsHW%N7Z7aH!_>{^$->0KiQHJ+c2{glntH;@1-h$m>NEhBSY~Sr_!m>AEtCs{X z>1(Db;!b%d_;E*<+=g;8-*=kO*7-@s%6&DkhoWfd*HT5{}L}**m%Hci#0lYkcJgLVB8a!^)1`A*l?d)TX{y1wp?b z6}MI4PI+c5H@MDHYx^7Hak#Pm5<;4vRsEv1{M=Yb^6=WC4{lG=A?O7}Q6FC1wp-C5 zCqXJL2$d;T{CKYsL@EI+vHgubOLq^ZO`1398GfgT3!%olKL=J9m64j@V$Rsx&91be zYg{qL`wpsArK{XJG^Vdslsozve!{dm$2(xrd4F=UOGdKT4;a)c>!&whESAVWY4L9R z6p{KwmWwYqB={OUFEN>0J9b2NuT41L5j16_uXzca3extO-&HIzx9#%r-USdCVU+h* z{(cuCa=&@s`3~1iklHtO+5P_`Ui&~~ifwIcYwM5WRajtQVZj&_`5IOhB4!8N@Lu8& z2ofo_Jl17X>L6d2a=+V^mb(;2+*&0X>SSEQ@N}uHDngq2PQqo6b3Uzsf$d`uN&$vq za$UMz|MN-@XR>}X6C_WM6D5ZZixaa5S{N<2J;p(%{K3x*K^~i4ZTF;w)z|xb_b@x` zWCqNpJi4rwc7J0D8hROweA$?<3L&xxtjbEDly?ge0zuzbR#okx!;>X={(9!Ku`oP5 zjCUnZxaTTU1t5^FOtE;=c3O!tbqU?QM2~E3ZS_fVkwuzJdqTn~=lC5>G?3j`^(R(! 
z9P6&v6n-oWnW;7O<~p=F(M+edhlhN|cQ>b|03R>T&gW;9o>hXmUXJ;Ms5rXW4%-ZN z;wuV7Q9o4Gq_+lk7gsdGdQ~>ffIjx;gILMGz`?Zyxp}+Sv!j-vfkITj#wnxUNt%>X z_z&(7C?tG-aJkt-D9%^zk4hH@x`Z9a2IlMMXdDw{Ho`yM_?-3OL%h7a7+wW;vv__( zweO-cn*={g;B+B*>XJ5G$v#=#!|=08TFw>cUUuu_7 zP>2V_QD3(NWF3w_BGB!STsILc7jr~_{ggIievnngf$Pn&no!FYQ&UskRtGGp$VhU+ zZz_cAASArQpNx0xwJ*1>t;OU(o3218)6>(JlYXr-ruOO*|C}%|LdGE?bB)1F5D~3( zf5rM92*gKF`}=#b!;c=IBneuA)c)NYmbDPbP?F8Z)2AHJin$k8eIo20-#ZqiK#8%O#S??>eL9QE5d$J`w<4IauLiehs?kX7@h0Hfn zmrcR>?Z?}*q81{>sx=sluPQe~Cc>?waaYB_K|-dYgQ8h9RGV&DRaRCO3ZzE)Gz22m zlq6~>UlF#kQ!CaOe4(hxlJziS`BFz$6%+Czd6K^H|Gw6@Qwy`Kas-xIQxUc_dh7D@ zE>tI?m)9p)3hX;b4m(`diQVB*GuXQPZWt5Prc+wTYxhd#=MzAPh0nFdg9Z-k(%*GC zT`btP_qrDW-CwkBn}C{Q?dXX0#mdS`2|LR&U@61Tz2laFlFfvRORH?PYfmsHBQfoH zQKLClM3{|MU216xfv_!P1}$rp+vSv9^F4$a09ST5GrlYX=>4dGBq+%Tr3<`*=uu0nN}P4=+_SY z!abyP6y^h$p; z>xrBw1VVh%iU4zRJ_ciKNhJ5GA`#HYtk!F$!^^YyH$(IRh?G0151CDITzKfo7@7_2 zAkQ~awi4oEV#-EF7T$aPAWEIPX)3QR=L!uN42rs?+@q+0l=kayiZ3y60ju$)vjg5U z886jX80n+FS`ksF0G*#8TZpkS&v?A&+TwUOcD&A*?EtxQ0me}4y3+8{kHdM~2?o&& zq55=z-_h(s7gDKp*-|DBFs)srGQ05Z9mc#B4@`RkJKLsQILGBfKLBZZ=R1FR_T&4~ zz|b2&e4E}y<9L{l$j)j8)ju-{%~-Ys>g59w2e*vtakp~T=Tq2w>){EwEE~H+SJL7L zDod=`nK6mc0=<0Qnfeq%S0|J&mUs7BniGJIJAY-u$67gM#rd{nH-;;&?0zi&=s#r1+b0WWBMG ze$2v221zael4v%^1`J5QvMm}K0|Kr18!rB==*1}KhkMMAfA)8KXG$bcobR&_LYlr6 z%y2R0*HrvU%68JntS^whyQy#xu5`U7M?1ND!H!y0Lr=O6be+ML zd>Pk#;r8+$Pgw5&hY6V&H(GRE65JCpb^t^QL-W;a|%JZvw%+^;+s6 zGL*P9JKbs)fC+EMcjY-G2LRVnkl&FL2)@-l+(C%dj&82uw^LYQ5;VqFo^E4oXzPQKPF9e>a8J3& zL!G5FdV@ezh^Am${nEB11bZcpxX@*yH7Bi9;w(KmBH93-vvuT)a*$VHAD~du5w@_) zRg&w$UBfo@S?dIufx*~5ZG)j^zr$TJt6O~qk^c2O)2kLUL(Lv8#+Vaug5rwZA4U+WBpI#htyN0*5WYZn4`GG==lWl^`v9|UP0*F4+ zRUQ~}ZJ-ZbjP+X2wo1OwRHyWWCrqYRfY%<5PnrlZpt}00k}V?Q$m_@t?l?Ek!qz9- zA&%)D9(v>kv05|XbsN5nOSUU=VG=B6F7bdIcWqAx^KJfOBom{9zxB^Lv5oJOvj>_cv1xeH9|QzU_Ux zkRN`qzI2-7SFrt$;3c}WDW<>#Gd6lw>o?4lJ zdX|j{Ao*6S{ezQ%SWU)$I>#??9Q;(ajQ8gY$IreT9?8f2;6FX`cKv8dAbd@NtK%iV zeS^{4;w0{1jIN0xdEvv3ZRxL32RSynCT1seqfjxu%PnCts)8d~1_d>y>=w6v9JHb9 
z>r;-B9-74L8mq@wzdVnqs!TqRyJxBO(-Le`*E%*D@!KUbrv|XCTr-fVa|l~_+pjyn>7?Z zL8Y?bPR+=9?0szSRIBjjc%md~WHq4F&X|u$d{6px3~nh;#`p)9?W|(QY|}lTOWsopkq72+v_H*C9t`WY<47ETe2?f&n!G~_ns7Jp8aIol zt-(#wtNBw0EGY8$td35vT#fLY-Wb$w(KFg-!&y%12AZ((C9qRzSlo5D` z5tCnP4u)^3j+?<+jx);bVQenU)xDZoBm~fZaXD|o8XL6lu~u*f;assROToFzHgGos z+u$j5k#q)NZATX0d)(Klnw`xB&6T&6Q)z^R?<0Y(XrN1#a3}UR*9y6M16geH0+-(k z{`GHibbyU9j6JYazP33X^<=d8KAWqWu2?9a45dU+~|61a5S3s$ET zO@_JjnaUnkN=w5^S&d)vT2El3rIru)-bm%=DRXONiE^qw;KO%VWiIE9$RFL@7KJTK zjal8Q;~RBuBT~4fz`Cw0NhC~4az*{dDNSoOkinQf(1`)3> ziJUsI;+~-D*UdCuihE)!)a5;+oU$?Jbs~;C%Y-$oYZmJ`M?rSedy??+O!g-PL1R^_QIXv~t!!GTotA zm1=z*?CKslS~c`stSBoXl470uXk_|laZ3+}hLb?@Q)2^o#-`U*HnPRA*E+HE*}EdP z^{i-_qypms*5~^0zIePfmLBz*T!b)H?Xayxq>3cNpE}DIM zHQ?d#)vbrlhd&iY`PavqxHlTx1g$DQd3>R3b^^pMuWfItFkF*x+8WcCLo}(=RqxQe zFVvQK(0dVIAt`1rH{Fc-T;YNyd9oP2{H1kf0XPN&!u zS&}kUvXudNt=E3!A30MW#Pr+h+=W6$F&RHo_yIADbPb-PlZ> zDLa`SMD0_Hur2} zQ}9W6GO#lOqJS$RHZLQLfz;d>0zOktA+xQZ_2JXG+1bOHl6p5#9Ls6R*p>IThpu?9 zJ3X1eAo=HymM4*eWntS)j<6Y>Y}PZ6>$7%N)s~M2HexAPu%`1SaT%)`!?yBr-s_j1 zR$TOC_%>@e*EKsT-FYe9$F1etWm?ZM-Og^Eit}bWJw`0qg+P2y!F#!Y$Ky0R+e(~? z#zlbxV&1R~#3%5w0@loix?@$+-Ru^3u5%%uC+H)z@C3ZBS@vb1dni)$Rs7SU%xx%6 zzeHu~MPI286t&@Q|Bf>Pj{nfD8B};|l&o92r0!#J(*J6kWz99Q?olQ66}FD|t^RPl zL+OMDup__e(vTrCY@5hkSNG7-sTy@HZCP?W-Q)-R;X*BC)%rkp3uRT{j@&H*-k9~! 
z`4-|Wa46P;HnR$MSRU~UY^ubJE{4_d5KxcZKFsN5KZMilkFDKyZ`4u{VLs-Lu&Ji9 zp-IAqJK;3w-f?unZ&dhA@u~B*b(-Kwr3DW69qtvCm-N_et*5&m} zahVU4M74IM_Nl8@ zwb4N9-MtUAp3WX!lLtP7J@;q9dB9gW8%ci+CPNg&S|NMP_b(ibVuhMYg2CIYxNK2R z10=SYB{8mnsxWdn=S(gMg~_k5c@tvm(?SddZhj!;LCVZ68Zhl}=?C3R=YD6X_BfSszW{B=xJR z&gH`A*k7U~@ad?}EmloP_z86wcXsFRj-BOe z;0&@=OOMHUD5(rfRa<(g7bvi9_?Iev5*`S^Wf``LCB4|NOWTQ6L0!v-sgyb3MiJ#| zPBYi7DH7%$EfM^#vUyym1}+6#ywnq~>aLNf@}o{DyruiKc?FrLti)RMdtwuVVuz6C z^Skcf5q|LUKp%dzOC&W(#la8TyL}eTTvDBf3lt{D3{y*6pK#PCN!Sl+PF78Z)`wCPGDl3@DK_mD z$u>V1t4_|8w0<2a$r@u4@OzbDPR`~3WX$`mv96X!n9Z-1w112U37dLOFi^HwuJbiiX(596R}9V% zC>dafwr;$@rMxI?&Cyq)t2vnJ|aQ(B2;>O|-9BwTe6cuB*#1|Dzo?8=i_16$+RHPxP}8c)->k67>I zvGFShkcJGNTA%uRi36;AOZ>9{eWD9vW55Wqd+%m)q0Ur;@c3CG5g25JoZwcBu>OvA z<($I!Of37>_s((g_3Rte((M;BvDB+%5ha`8mDcz6ae69k>#Ffk6@@Ia)8MAp8{A#v zBG0l?`7`Bu##glx0>Msk;#--&ayhj0EP4ix4|H$j8CVUeF3L>H%3HZx`#w-vgg@&& zbD`riMHq*KPu9H=MVdrIJmlsf+T;ImwwFMA{Z^KKc4ye)i`c=H}}er z0#QP;?ZM;)vJ<<#j+ffqz8CZTFIH2BM?y^QFKjrs`RF&b*|Vbz^?`oWLa9Su;G%#( ze_t>UD%b{9R^?mWdIBVUzpBodAheU3G z5JTad^N*WP|Lt>Y>HVNZcJseNK!6he*;iKou}INA3;x&d&6N0?x@vd*ES(PVy_O95 zS{aG7Q5E3Jb)pCFjAb?U&P52WOVZl&^S0NSKArG*lGwugCC&d}c*GYV42<%hvs#kB z)CjoGu6%8&`hNw%98y6LOvqZEzVP>TVOpS+e68L7o#ub<>%e&Crr537IaU3@@ib?@ z*5(qA=!nN5yEFb9y)@y{FpA{#_Ei1pn5qV$_z>#Z)#qc!TX-iajp0r{Mp^I*xKa-; z{NZkXZ@pRIc-WIUH_yeisN=7(w9OGBojPbZEgD8~mT%zn2p%XWc{Ngd6k0bMsp8Bc zr>soQs!zPOS@V!n8!vZX9nMv>2r_KCaxKjX5!)n}Ble-^v$EQBC+l6fg(M1|*6CN8A z7lnEnNtZ&)eNvtnUEHPom|Y25F`QlrrCa(okd(r9y*;&>V=sHo3uc{lrY3$bq<<%^ zW^RVzn%wl>E*+Po1=PP1>TG=7y}1USI!@5_{948v7iS?z--LLjMoLYXfaUIJRK?a# zZjVHEn{SCWU)=mp_yuHF=a@)MB<)Cr>!%8hkc7g=YpEVmM27QeP z{SWk6E?N}H&PZ*_nT8!7-+&^73i>lUf+^8i$C37*f>mqr+ESbM5Fd-F(_D?5*bPa# zZF<-bHP9z@W4Kk~buNK6JKt0LHk~=>e z?RdJf;g$p&HKHp0$shV4tYhKjJ_hzsU_ZDVw3 zf%OzznA4P7LI0$Dki|0^6^v@zoAm1)hK^8o0)y z#uQ(tba*|GkHFy7b<6Q8^J~#b+K+aB-JQahtD4uCYaw#W?=91sNUN1AwL+6ydBoM` zd>X87!??WO=YXhh@9$DN>d{Ix+Gd!2or@1rA*YSkslLL!z2ebw-EydD{f3f9G>WFr zX=*nfoO>eq-8*~`|5m(zj^`t$>?_#RCaoZfr1bVb8l& 
z{yTs&eS5pr3K7Tdae)`R-Wsy0Bi(W^Qt>VA&vBxm+S<*#$#LVW9hc~VRC!1?pSjEd z@43;csgoXRbRV(F;ecs1^O@8(ozK)jV$~*r-mSJ8a~7bPk~S*oGk)Zxl#U>hch*rP z2SXq=48!J3?Ib2EPct^1)bYtwp$640Y>4HEXu0LqprJ*Uj$shjZjP8VLI*(#?4@FD z=|eO4BgLpQoviP}BU;WD(1`eF|S^Mz9reV>M^4G zobp=xHesW&3cRusTF#*)-m#|Ve1Euey!Jvrr0}il#A}J>fX?}_27)dLIr{Xf;l;^& zwE3D9g8RqJtUv_iV#YnDruRFk2fSS#+@YqKzd5K7Mh~J+t{htl_Gq~j)__iDFY&S( zV+}elB7~$bZ(ggj(Tc86Q28la``B*zohX)Cvrh1c9=b%Q3NJ2(h1pKkDm2a;K_0bc zR}*K$c5;j>mU7CUEb3m@XML&g=CpU>My}+N056HW5~K%#%z&eh_aacbgLHB-`BImA zEzvw=WgRHqCCRzYT><92zUmL+usJf@$E8NsjO{d}tqu8q)61|;4EBSq z6`Cq&LDrogOssJtV+j_*4A_@iY+(eAlLoa9hxRI z+KZA$LK|IfF6AkOX0L>Bx$~y~($v7%6jcC)LqtB~k^TBd796!>sfI64S3T$v`qlA( z13qlKtMhE<{P%%5H8f8a?Yrg5xO|4snLOw_bDgm1T(EC$E>+hW_VfgSd9cS6B(2=__O>@PKTIB1~7)EM&K4YLaH@Rk zaVvOdcV)MuF~O|BH$_?<>6@&&hVLJr+)emY^*gk^LMQa?Iyyo7+3qhajRry9o%R~_ z=wDUu8$mytNN)Vu0eid540Hzs@(EmCIhdR=EQlREKRX_}!*#IGZm<^@CrQ7vLs;=c zyw-j~2;5{oSU63~s2rbglyx^KDYzM*fUkA-Fd2o zrS_2u$>7iEa+BM6~$ZcGpKBc|DqhQxSRsyOX@Olwx(?2b zK_}&(YJ|U%cp!=2xlSRSrpTwTgXQ&47ELQp87UA}OBqI_aT91_tm#d$mF;A@%~b6T z_`$pOiM_ZHUTlL00Rs#TQlRO^ceB zgv`}3Tb_RX#vZ%LjGe3W&T`(E9I$PlTPG&1KWgu{m1Ehep~O><~81kiG;bmjn^jXno;MzZ?jcZeE@* zT=enXyrkBLFI<~@*kE5JV?^z(js&rTh3XSbyT^Dv-rXL{XCWx(x0qnR zkMK@?pkJ|+R%e0)nzDw!)HJN01jCU`r(BxWcr9r$B=?1v?v%ekMuLN>iqfC@OQpi* zf0-*kOu7V8Dp+li{(?f-enmS%Nn~1o?DWDmXq_nV((p`1iR2WmQwMrc z*1KPSiWf`GjXz~D-=ZgI>POcho$Z|`!)fcizqHb?kvpf@oS-Z zHFAyy3(|}(`*6Q@#W2vPZVUK&`7!n?T&YP45)P**LFLS{RICh2F#FCo|Z+lzdL6d+BCtfxG7 zfIGvpkyWqqe6_T0iXZ#aW2ylKy$i`!J0crC3QZ-Dfy;3g?ljUpMQ?Pi3j++5!qIT_ zT`ZwW!hFM=Px3d&*X)iq-a7Gq+DI66%I(saJL&ar&_z9$4_6S*niU!drly|^g-9e% zg$P}|EYywA4)Y1pBuIyXT?}N^i(0u7tx&(-a0_!Y;?tG+-n(i}^=Kg?MY=VVq%);3 zNcIChJ+M*3E4B7NWSC~#ty|7sCaZRA9eysi?4R?4yizBwI;vd}vdRKdZ0wVbf^<2~JdF5$2KyrFBQkOXPs+-Ih7(<`-{TKeZ89)c@mH(IC|BGH5`?rXx zzmJSp00Iiiz_kL<3Mfd04J+4wf9}#l10D4cfLKKUI;tB= zi7alm0FX09zSi##WDD>9qn4e4oiaFwO|`TzGI|d{D|}CFmQq^tB=7w5$Zckq*<>IDm+n0SMT*=E5-mxdqturtd@REI7CT_6!FDO8515@KZnb 
z?>7ev^wJ`WOp;42vF_QP03x@L;+R=5m#n9iYBTU2pws}6=>|ZU0qmQfNy)GtWoD_< z^C|>l^^}46WQ$qbm_LjVs3$T5*~ z$H>T#0N9nz?nWaQ2J}(}0?B?>>=k;o^;P5$oZIN%07=(T50N5^0N`1J+^u_LWF7`! zx&i>1_d~EX)_A5~QM*~l91DOgKn}87h$?c;6neSw@9$p?x^4p8A}>G&?`#jQxRGa^ zt$+a}w~Nfpp*tvGF++?r9_EO@7#oxu05Ctj1prsCg~5Wse9|p|`b)9QYJ6gjKMJ}2 zkg7R6v{QQ;nunC(PGu~``UUQ)mI?Bq1_K~XhP=i!-(PgPzV_(#aR@}Dh`=rqhtlsw z=_IWKw6|WH26r5zQD_U159GXv6d4K;k|iM5G0~@vM~(16!rfb<`8dpFbdBd@oK@45z8E#>lt!M!#U1Sp!GE3c&@(jBuxU_w-z&?{+#&6$a15r_MC z&O)rn#|n{-tYY9tW56`LoI8S@+ZgWvIt2J*&@I*Lc(JIT$li_~U|&c?E=&y8ig zOpvB7e+FdLxOwKID=ZhsQLI4(>c{bd{l7txf4Nm#D+eI!icNMFRFRSA9oHaDKAMK5 z7TC~TyJ$^{yQ|+fAQi?e@#%oLR%9StLhKGwO|rep32jhC0_tt%z~{zMlmp1P5+NC) zsjidNp0?{W>WT+Yw5Qg84d{&Y)6?8*=j}M*xfG=eqvFh2s(wcT%ug~oUO+9hZ}j&8 zhMJYyZ6S66`U#)&Wl0Ee8nJESC*xEm;6mmbnLMbb0`IQ+!6o1bkk`8JL} ze?BiVoQq&A2%z^b zw!lC}^POevO-5;o9cXs#`6uY}KN<2xf2oy!js?f;*FiY`pPZQg9h(Tq49b5W-{!y0 z`u~Sk3R8xJM<}M;ht|v#;Xed4QRT<=c;-VlU_5>wMj8e~cZzNyyenE@X(X9W)hhE1 z#cFGdPK%eQRfjr|T4_?+ zTx$+7)SSp44vhs@x19NLY1mUZSTsh|5_+I>Ifp9(!q}aQuz(jK9Z0q~=-zMA6 z-EOMxMQI}0XEHVj`mX=owrH+Ee*a3oyRyyKo> zROx1oj%=EC>*P>+U0a_UG{+oIUoSL@-3>!bg{xBQ$!484&;F^ zUO-|k7jD`Qa^7z}`qL!n%sU>cXyFF8&gQf3(J6B+m5V_`&OF_tSEFB*3QvN&r9w@< z&}v;nc-+JwY7u9XPD4P#F9Q$`g!sBm=Ck2}Md@lN(!#l%{<8DZ+xnPt?9Ey}V&98h z@#;2x@{M_9%f2y~u(ro9UluCWxLoI1snx@KBjIR39fxwU*x3&8<7h z0Clhld!O#*K&-sX$+^P=Oq|>;K2??;P6SZj80A zt_)CB^hOl3Tr{&)egEizg16=_lKGS&@*{)tx;M$T{t0}7*+ww&!%Dm}??ybtf&ZyE z{c}0~8k^c#>Qsa*QURfukMum+Wp{i{`cvXM=mT0Dtpv+=(ci7!aTTHCcQZqIDU>v779Q z9R?!G4Xw55101FB#A=W7>7cu;9(;uMMDaSF2$j<_ZQ-wi4??U?7oB74ReGsdZ(6Yw z?Rt$1BGr1K3t}Y$t9yq3Tmt22$=95yGB(mCN{Y2UNgAy?oOtymGfSuBvBpkvBO{aG zB9w1(zKrdC$v!l+?;s(P$W$Wwcej`04>IX1PUZ|osm z3>ZJM*hkK#ktNY&$*f>Zr}pTFhem$#9ZHr)?hXTiv3JMdCf0=Nj>C{}&y!IC674b& zhfE-ZS{7#%tVX4oBu*n1%drP0?-#x=Jv}UEquhVw`A~B$=F1RB+#Jy03snU1JKz$D z#YKPVxLAmL!ihPTG%K=#I$E)wzb1^fsQZD#3j- z5mlzm0KV}9PKdItREG63L%OnwOc1{3G3B!N{6bz(-+k=RvzJvHzFGJa)zULmnB|75 z6HG(oP?4(RV7PYCEVOuK8t;CRP9cr7HFV4px;j4gI5^Or{6+;q>Sur1@ zyDl^+e5!&CrqZN^nNO7z6M5# 
zV>A+EV}@*-{0^}iUW#`rQ<`$s*`_mtRGX7gfdBtoy3U%*^wfa-cen4!I^dfWB72qL zDYH9t|Fg$u%~ke)2k0GdbCBL|`2R1=!3TQBBmsBND>LfPut0|kp)c5@-t73wG+$&F zQap4rnbT>GR)%MJ6Mp&97pd}ZkYNJb>+gJ>(SAo-%x*SL^kkkqTc8wPr?GP~{kT$) zBhhvAs>HY3%2vsvdBKXK=X1^5%l^FAQ_geGlU-x>9^#(nSc-OotV;UA!ty-$FJ-xO z>iGNl!Ua&43sdJx@+UWw{*iq<2)6ZJ5tjQ$lCI``5Ll60JB2FU(UjX~P0G-W30++Z z9PR)9T~Td@?E78)Jh;U_LrUnj{o9)>>$YjuI)*+85=a|nMM(oOL;IaO7zNEB56N{g z@|!nPM_2~_@qF~AoEm26VZd{1Mw3=`>$cs$yQ-*T8Q8G8!0zC^QnJw;yHF&`h1i!X zq&|k+o`zy(q#}F!nFU_){WbEryW@?$2OkJWfMPXUU%MuXX)ab5gb-Gj9LjoH8>m*PSQYyYEX%MW)W52^CZ*S*bYk{B!L9|c? zBbqKJS*oTy=ko=QSSeeW*OY-U04PmePuxJ>@&8H*KUnx1>yM!%OXE1Btp_c`wti;Y zDeC(@+-fhy7@~i5MY{TvVKZ(H_*dA~LnIwEIY zcx1&s&g{5|ULF11w&Ix)at*|SUKtvO%5}nU&ekx&@yKDJp1t0)>m;Hp>bK7((>|_# z{j7oPGrWcrT_v$}KQ3KDj|RfZ@>`7e^;PzF22;nh74CKq{m@Dz72cir_4#ZvKjHWI z#AiGQ$LBv4dtID#MU)q*y9#`Lv!8sQh||7bp%9OGV^?rM(>JS?7cb+8Ui<;RiUC3> zkBz5Y#m9ZMTWHMG(!k24jTha9Fn%ty7-jbPMZP^YA@AVj))sn3ZzA}{yV|%`x-L2! z9}w%dTwY%r^uSYIr1ci)%bQ>5QhYUhq3>7z<|${tfQ*jA?X^%#ok}N>KX6I77b!U1gWHzmj}n3juv~#IT)<>J#@CP zE7#s$!XWR524Q#q+#=;~S0xLlc|n<&zlfxx^1lrwh1IZ^V`MrHfj|W)+-Narg968q z-rmLL$J-$9u>$OrU%5-)`hfz_z7t$GteHs=K%3qIu`*ED-o8H8yL_Qz(!?;$6$ZE} zUviMH>+;$6Nj1U8*7*}ARYl_XXgo+>1w|`IeOa2fQkA!S^wl5#HygG0!oU9fkE8&b zo{(@9JaoaxRUg4@LY~cCEQj@~^9_RUYxxt~%Ht_IlZNnzOQn9I@ptUM^|b zHhOG#OC_^DU;0a6rhoacyvpafkZBXSX?2X-mgM&#x-N|yXq53yQGY4@F}&~Sqy12 zR4@0WDGQGHhc?znhFXVkgUvZ=42)0F^Ltyt2mb!nS5|AB2WpG%paaf9m(2*;M#pk0 zo>VOEIM@q4Pja?yXry>ON_BbdoJd+3?U4`LZvHYp(FvhY+66akgqEa!j}LFv6p4Mln{%0Wb$ia@AQ5s+R&=p;xLDUlK& z1QMk8NDU#7d=v0I_da*sb?;sGeZRHd`+e`MB@6i{^PkzXXV0EJzx|u1kc#PSmuP9W zU?uf+-Fl?AORn7kgNT^wS-Xvdv-;>$^&9BW1;eDJye+@2mWNS=?Ijrwm}8tb$QVGj zMqc=$3g6hV+gXZ0_s6I(Lxlqa0&y<|6Wg}Fy-#tfH2Znv^Z_iy`DfX(`sDGFw?i{p>R~Ii_PPOjViKv;KOlp;z=wKd2DCk}wj+lqmeMSZD57GGT4J&=)ZsmAt z%k^c6d(Z+SjC1A}YOzWvqG@CE3x7sx;@g&d6+$;|p%9P~s%qh@25sj93SedSBY2^! 
zpECVcu}3!3wzhN8UIWJ#_o#akQqdjdi>m_-%j-T)MMbTwqPDQBnoA8n<3yhQS4uNT zbj`gzE0s7`I6qJl)jU8j3|x=kG47S+{!UJ-4!4x@6utlG#bg!gYp6N*5 z7-rCP!lT~D#+$IIJMo8=vdz24n3_QCXDdNgE0NahxZu*(nm7{5cD0xN&#UHGSIxC< ztAgIZ@7X`s(n#fmm{YC=<)ijd#yH(<@>0*_V5`1POc5E#MpkM&V5=Zj??+o-O|}1G zkMxU-906FzjZm_b+aI>y{R+q6ELnbQc+6)uyk@zPx3E@ zy?KyMmb8N9n^o537XT$Js_)9>;Kv;$A$xuj zX$@&MqlopLrd^)y8=A2*c^OD7Utu84acFWTqFHrtUq^0tNaOG)D}D3Le3C6mWJh2lLDJjvGD;t_g+(T{$P8k=E1TY1CK~{2i z-jZ6>jXVT2>t)6Y(_;`2%8(dDQmaKEHBncth&tn?b}l32S_8qnyS!-GwwSpB2}8eu zFfnOWb=TRJS-2FfyM?d;>yE}NU1ipq$t-D|6wQ1YPw>6ocGbn+6W)Nn0RjZ{B7N2g z`WjxX4=v+z8xQWz!wGRA!DKD_! z-c{?Ho}gBIQ4UU1TMK|Q=q2Z`zkm*v+vWPVDO@wc6e*QIUQ~WhEPuP<$V2dTXuBMN z5ca**Y3OLaU^uf`l$!-xUUn|7?1o5+w%yUa(C<^pKO)HnyD05G6#e)>BpTcgZuG6h zW_!B#+vR5?(!LqR%?ze2t@g&*3MFohSS5aPlX4tJ&nNfYKuG1vrP_84uC}PN;Zh?t zqDuQkMeKo5uH{S8?klPxYgirOEGzZ9cINknb|8+iVx!8c(->j*WMp*v#BclI53olP z)O+S3y{D}VV)>Oy@#3@%_3o|da?(m-sbGj3 zu9JCNX1$iLv$D!v!Fn-~Dr9J!tb4F?>3G;2qnUx|`j9xDPki8ZVMnsy)$%sxjnoKC#U8k6VPX1yN2Url4- z8?mgq<-c^YdnMhZ@(>x)LW4Uawd~EGZdnL)$y|U8$wDl%|H9~y6wh}1UD;Ak*VDGJ z?1tkW$c5}8J;gQUx@9jA>Mq2i{Qi!IA*Jy)CV^;ww+ zJB2^iwCXE(x}{m*zzjsnC1+4KIwLEYHI*+@x_nP>zU&v3uu0MrT*Q{`&NX%_xfgMO zxdQ%@YCqXQ89ZiAjnXPn240z6il%9~PSFp$J`m`IXm-xO2CQ{&eiGIQG+JrX` zLVakw-CMM)`*Klpli`e8jHAEJ4A4LG;;Xxf!tK_!GL;jCx`KIuAL_C$BB-K0#R*b23X0UH@Xxy7nEwebgRd<` zk^5QB6>O{UdmmNC$o;pzRa#0R?g01)WKjMw;Im-~<^3-JBdLFKefW_7Kz}^)-`BnWVX`>ikBVDhjLg|rXiwF~` z8Xr;tcw>pPO5Va3E)*;0+v-OJv`l2>NA8DS8V@abg>QICR+cN68P#`6_(IQv`R;V! 
zpE4-V)&l&?e^w$FvtY8ooctaAGM_i5vWgBqi!wU?bv_D$88)rvC~-5xfsLE{^_{l|+(>|Z9+1AvAS%#43Q$sx0?~1Vi zPiCVf*8q+Z`ssL<*ul@@5-sq$5VJ}9F2OtGbTAK!oHvTqffpSA(>F5%{;P!chouLJ z%A@5^wCHYOOe}|;?K<$B&%Lq5mLawc4|>_3OE=b1$y}Yqt0?z-QtCE*R{DH-HyhOo zE4vGBq1zHjw+>=fAlsHBB-^C zVJQrDCLgrP53K^smJf6S%9594)Y4k%@01b?f^8Sm)wpRA1~_}yzq!MGbW@X0dODRQ zc`yL_rNUGAS|bPys6+zkZ$uim+|U*-&30E`oU!rFeQ4qx70;MQ2Qd!I3y7-Us?Hb$~=2S66-Aa zQ^sbkkUb_Q-~C9cZ+A7^CE!x=&xmESVC?-B2Su#EJKI$S)(~6{taivvk+0@3c@I1F@-Co>^!hr1P~Lioo`PPrn|MB z2o?Yc;CFC))J^a~Dz>_IDZU=O|2XK^yKPxaO(U$@ni9h1&c-JD< zB6$KnfgN{O%ve)^WPmlS^|}ZE59Js^v-SZn761T~^4G;FI!x)YPm}rL;HbJF)Xl{u z!@}5DcD6?koL(ipxdD(~hGu{#uXpnLP^>UDuxqR+QXRmoJnOal$ifSnK2w<7QJOI7mgDYJE4f+Y9=*L;@wen^brlvu< zoBOnnFHgXFlnrX-gB1YnBg;S0K5DD;PZSju6|_2`is}r}iW}eV%^qe5>+_?MXSL|G zk9CM5)y9ECWB_|}kN9WWM`Oylo(w?ys79xK3_|}+`}oV-hb0NFIhh$=MEAa<3<4$R zm~xhH zG_dL?_|ff`dvsKqkgksXj|9@~dxX-DFnm)N(wDb@20H^HP7Seh zbcj&%*?3PwAjH_hC&&?JNnEO5o~~f>Ey`hr+e$^yvP@XAb!TU?X!KRaD?f2{T*m;V z@_P)+R6p6jsM3`Vz+-O&1euosnIl^LLGzqfy%N3VpX`rsn`UNa`n&Bs{e5NSUb|Mv zJ|CC`7VU^?na#^f@ZO+PfUh)_?O8A#V#b~bkcWW$a3wp(qC7Hjw;qfV9DAY?1UJO~ z0?5OTOn3wqq2Hy-M#r+X0o?2rz{Gj1wTD+!;b0IM&Wr=?zhCyaK}=5KD4gqG2p|D3 zwdXQ<7N*qiELlMIkB*Lxr=vc9i@v5qFmaPSnqb`nSo-Fk)ZeD#X|Djt6<~Xt(Jz3v zV|MG8TJq+>mOwcP&}~-A!(aw^x|+2G-tlxSpDLKICjcQKXls!%)pc;`FRY=lu`#d; z?BT+d8V^2KcuTJo|0IM|mG7wrv32ywj@p-#OKfA^Cm9;6AD2xlSJfGZo6&p4r+n~{ zWPr;o*Ip9(JXQ#rX#$>$S|ClM-_Q-&6Lvs82$()S^R)AVK8ZD~xYFUKB4e0T1yi6z z1_ktX0g@^EbUGDdo=v+Qye1WvoEi!kprK*HTlKn7Jt0r~N&5TFBmkHz9qsAt>N>dA zpXa&ybB>E0Dl&u@U4&X|81wzp2hXL2b_{@RBwvAHc>aZ}SLu3j9PlZEiN6kx^1khY z!@z{W?2Az5Km9RltoxmMZ~y}~Fy6bWyS{#FpZ!rpM1;PyTISm>{TpgWFc4fqUHM=| z)EEX#nL6L=U&9RALjDc4@TF@xv%b{m_LEmCW6+^^niFI2>RT6b^Z%ygb_x7m#(-!$rqtgF5EJbY2EZA z_j{$))rJ~p^s<8J#*yp9s&p3M*2L?sv!I@Bf5__uugy5ZRVUy;Q(!NH0*)Vp=>P!LAQn zL;j5Rt+#H{%0MXXpr6_Qz!T>EBPqh^Z#?1Ke}M7+s~@X8*Hy)DZVE{YSwkWY-e8xu zNV5Sh6EQXMW`qpnz=`gX9HOc#e=3xR|{_qx7GC)g0|Wdp&(cUW(EZFJ>HVSDAdXGqEhGohOU0e3gKM#7Pfl@Vkn 
zHDYGw2PKEBXQ8IiS?d~0!76nqJ&{@>d}ybzfS{PB=qqu|S9l~o*&k~kO@SaS!i`Ub zWHx-8-t-^(KqC8#?7$+OdMTq4Jw#rpCgN9u)&i}k)aeqEjo{0V%?d}WV}>jye;gwGaWowbvDvoVMI3DdPUdMglXypS!QX;ENrdDhkSbYKOf}vvQ=64@8q|6n&VdTrS?#Q`NR)x-Xp%}*xWzWK*8i;%av$A{<1yf+chAp@J)id~#+>+P1f_8>rt==#@+~M{h7J_&uVi zSvEho(l7G?p?9VAdj%|mxLT`hB^z2?cp(Q8vFaHyw6cO43+dk_y`hDcQxxkcp&VMM zX@&O}*`U4?fvbDhR~xpGERpwulz6~@gMUXdLOGnEiGPZW5Y>$0SME4Vj1V8@ZyFLJ znrc6^*P8QKnicfYRzBQv`&QSnMywgJNkK^O%5|>cFhbvyl1%va&V*mMb&PC&ERoRT z#HAr2OZ@9A+wJ(i05AM|z3A^#BpQOJ^CLQyU~dp0Y!Lf-3yYbf$(5|Y&Fnt}6F9O< zWi;ovIekJ+VzrJRM#TF&(gs0}Y{AM1va4X`E-& zCC+Rq*~kR&;>d(4Ot5)XIl6MmR~RNYF6nuEKuo9ESp!T#vJZx<;WH$J@4$ z!d2U0+aaO!zL&VL5O~i@bg@S$0)pII*0a16<@nh(-@$=eVI81tzlnr8!AKp1F~6L4 z4}OnH`z(st4M+UtUV_uoQM1L~x>D4IK;L^9>+NwbL(DMw)aLLBk7APH^=?8#v;#?V z!2gw^&d#2s_0C7Rkp=Pe5`=le^G%-4CH(oy#d``#1+A78=hV+RTd`hCMXh+B zY;=%>dAezxysG@&n@j_QF+^`+*jELEXfiqU zu-DM>)ji`EMCJs#fzqTaN8zu8BdL%7lF=ou2T@_`8D^Ol-LbjjkSuf{pkIULE89zK z8$;x~EeQ5zv2(=7uDi}KThST3+)>d@fCxRsGn%_Vu6w$4YqU(}Onj@C+RKmIIcE)~;srG&O}V ze?8)Q{dUMMSodJc{l5~}SlcKp1mY(IPb(?hg!_ri>fUw~@% z?6A<>;0FZ;=_F2G#Oc*tZh6N#j^Lz{**0Qa)}z=SyW=Y{WbOF*2G1odJIrZm6IZ%8 z%uv$F*b}P=AoSmNB%!lSUN;tD)4wfFUSI-@G|k3PG@X1Q3Wiedse>^?sWi{ z;7{^6fR%#Uz+nHwyC8UllX~1fomsjz7ybm=yIxb_qWcs zcXsgF?Zb%C`q;Ts90iwtq#Py7P5Vdyn%W0zzKh(I6BK=$T-J#@J10}@FyrrXf?-8I zDVn}oiX3o}^FjTjI?9-&HIa6^L1GGq;t53EQBWRw(NGtAC;-gMnC%ec3u3&ogtWlw zUhhv)P*iE}q>dTRlxElY3`QDK#ZK;B6DvftEtd65`ZaWVKzi$rc;_?&!>_?n)*nX+ z@LEwpI9GFor?j}gspLPee7#~2bykith=rP6|Hj?rR6&G0@>ZYAT81Y)9JM#FaSOCJ zy6khZqweU8(^Al2z#{op$%R1L0@v1oE*S3LDttwxDhTiWmkD`yE>l!%SRs-6s;dVN_^>LeNyX*v|bQO4%{@ty7H(YKEVJSJytYHQRTLmg51m2HdS#;;P5i(}(8 z9D5{eMVAxg)0WXzkSoA(pu&lFV^CsVh8$&8@e5OaViV= z3GoTrJnUdo{qQZybNLULZ9y3u*R2l}?oXS8ekZj-8bR$&4V07jBQ_!YLT$25_0Spk zl7R0-+~n^~!sk9k?orzTH*8y7joeNrBPMr8IPLHpQ_*+BQ}!ZTabp!@1~!s7d9=k} z_i?CNbUB9IOk$=}dJDVjUrA65yrGPi#Y0%_lL!UhV)YdIN~WpOq& zK~E|P8SYvtp|jSaKV;5R*zyaoPegnw%Q(W-J5eJX$Cjhtz2?+9q*)Rs9NdOsH_zIX 
zz8{9sq88_3K-^qzWRJ>-?f`FJ4MWHiC?SnDHOmUp1+rz>4#s$A?na8erY>d!7e9WXxSBc;8wTSqWJY~l;bhH|_| zGS5}O1nXU2dGnK_&!YdJkgEEOiB6~N6r?gMAyqfmfjP!J5sXtV`hj=cEoG9O_w6Ed zY-&K{oQkY~48Gq|qEW`m&^taP>{#?eGFmToYfnOkm$6Pqx?#o-zBi z`aacUMmKr3j+?YOOiM58OO^T}la*XS5S_)_h}apF{iv*q#{g-WiPpP*3kKKY0KLI2 zK66r=oAYV5whHHy zSIm1va1i#7slnc1-e#R5>mIPpqSK{jj(!r)g?I~iaYybSX~8raeX%2m7q%9^%2>TV+-GlH#wc7`7fIVDor%fOg{*9Q-2`qx=RF@fZd`>NJhB%V=OwLf`$Y;F{XR{zXzAh zQx?2?qzM5b&`CL>HEiCf|3$|VJidiWao#!IR))^;o+4qh=B@;hs!dzV)Lt- zE^f#BDt#f#?&;@A-oISyrasa$K|2Rhc1aVIW)DN?Dh<%!Pj~ar4b!JYDXKQ8q9Rtg$>3)+G5=h}}=P(VfXz^;NdLM2G4R`~_-=^@|; z-6os93_m)z5-|A*L|PaEScSwPAi$#g1|-F-)$sK9UpaHCNs)=+Tbf~%kqzuw zK+f%`{~Zyc{~htB5p6Mo;NJ&_$ZD#RJ~-f8=?IMfc~EC@5Mw_$(R=v65YGA!8%uA6 zX6Vjz<$SK<6N3X8!Mj0l4V(@Hb~_#X!Lx0X=KlXywCz7^-hUjsQ$N&INZK8gN;{-7 zrLIJ-RMRL6rBak8w#UEhW}&F6?Ti~@%k7M(z%@jV&c@$!n>YW8kSl$@?#3f;t4wHCQ486N*J0wcVj{fKaDV^f5)SnTz?xu64wNMq*+ zG(2viWQ6EM4XdlAH%#&?%6{7!FU_ERv5z!XyjtxPI;*gHPiafxhzRVgt?2Gx z@`RJ5mpa20V%HmnPz4i0b11PV%<;zjWLBD2S>X^RWE-}d=r?qH@tCw=LQH#t5_)3Q zO`g@86E(%r_3TbuylZCX+4gw^ z!_u%Qk5(Ueqt(YhMoUYcI#ybYuC|H>dCKw8476x+;*ty+ulO!LP4VfsaB=haebfC% zhO#{?d!ElQ=)CB}vJ#caWnJoe=8B=ueBDY+Rqzk{sx(WDlECE%!kppkq9}Dy*nO*G zIpmzBF(A=M!sS@d+G4jSX60Bd@}5vbVHDq05-h-?-Lv8PZmu_~3Zv3At4PMgZMqIg zQV14NsNe0a7(OAWn0Wqc`smWk6RBlh@5Q7FtNEPeH?9(UN=DPXmd znc6HR$E&rMbe?JUn$gx6FO62oG*jo=`NsZQ*;aR97%cfLxT;PA4x>zye7n>meuR3j z(9kFG7LI^Yx_>OcYD$)y=>1)3^U>C4QzctRSrL+{6?VrhYe1Z$kqjNdOEK_68`hJ`;ex%lz zox_EZ1gY)7f{XcTKP8XZXGTX-GEnZ2@1l#V+L17D zAKj4|ZNZ0xzU+4y#M$4x1&|wKzLlrlituCJ!Fi>qsUE$VKLpdDlJSB0F%6S8lk7AXWuWt@Lrsgi?^kC?~~H}9C&B2g4mzm)871ZuxGvHF&w zZa!FvD}2W%3yw9BLWnQ!v~m-jF@gyKN5VREnomJZq^RrLsrLTM5f*NXqL=(TGl9KBR+SiL7%p{ygCEn#BR{)#?k}u4?;IptjR4phv3e`)kheh z!gO;DkY@l7v)dTZn2tjp@!ngFJJXU-HA)_`W_L8R*s};Nw=r|0Ma9>qCFYU%VU8u| zQ&N?8ckLx7m=$}mC5E+)FqG%!rkfXE*D=SOmBA{}V_nZ4#l_dNg)FLbcVEx3OJMlw zJm4okR+?n&H^+DNt7`)-xBPqaA-J`vJ0pYF69A6^ASpAr|9dfe1h?w~PnI$k6K-x5~chlPT z>&2nFp+~zC28Q}pVY*_Gu3K`7RIJJfO~v6Qi$IoY>tw^n 
zZTZuA$Bc~$xX`9p=PqVIZ^b2Y;-=+Fb`!Y{Eid(Btnivql2g+8v{zU&t!e$`j+?x$ z!4&OXhxohxRn)KMv^L)luoCKgmF&=_&-Y_+d(`A*|CJ&0d54$cW>e*;BVHVmJOoFN z)m(rUKT!@AHg&fxNaf-pYQ1b(vDXy|IBdGgLx2#UNWYzXT7&peIqgT+96a@N{HgS* zg+yWXqpQAqlY{}ci#MkmYI>SDAg7fMMKoZ37 zTi-_x&$2xhmyJH}Zux21}(rm2*q|Gj=8$0F_4t<`jO`&hDGc;CQoZj9ZK#{e$*Qw)#-S%axUs2R( znor;`Hmt>*AzgI6 z@#<1nEmNW`ht`gDS>vl{+oXv_)LvQKG{HC+HN&1F)7&-7Z|Cyu*LkRczLg?rC)ap)>wMc# zvLxcjOm0YOA9pE%mh2X7;%f2|9lR!F#f-XcXLukFPK-$x)4_PTjPK zUgsq{ zT0AEpHobbt`LDlS{Y>@oDMNlTG$iOzcZz|lz_ufnP5L=PTC_jh$wz~hmN_p zwasAZ#UU9>|r-h+Rn(3Wz_Y?HV^aMTorEas;7L^{>;1rIH=A+~bwE4R%;t>%m!o}4=)v|<=I!+3;& zT@*lCECs-3qRIsTpAsI{c+0p35SXiSLI5UaT+HDO0O5f0{|Ou_9V9|8@BXV$+N8r! zBa5AmD!s#V3%q-qTC$4o7~2^e{a&=cms&l>AO;!u4{PMLTQAmj{D}hIk2)M|u~ts* zH^Xv2Hj(=GH4gvPM*X|=6aT?(m5%ty2$^@nZRn7Az!-yKSy^)s6)6E6g`hOn=N+}l zQvI3X_@9ECX4JB^9j9_(fX@%aF&Zyb2W&ba#2}Z|l<5e(pL~}uMpt3zZsxu{xt7E;I0A zZU5R{`qMuE2jiRbUrJ_kg%asR8{@y8vq1|>24DTBH_xb*C9HPQSur9fSDL&O4j!@y z2Fv$2(Ahcxf^f5`Z~;?bK*tL4l_bFc?5~#R0AY3<0N-r?=E!qrWGsnT_1pp$Ae)q8^92+F5l##(@06NU)U}g3F4=a5IH8r|Kz+F50 zHy>vOSnd1)1ojxbqxIvXjsd3reT-dE5=_8fz6XTg0Nv&u-&6taI$OaMxM#Wgqv1`V zR|f0rwHwoV9RGyns~E{Cnw@n#23UPmuSwG0v}P{au88hmv}^_7wb#L-mJM*$NU_D1 zpB-%zh!!sZbee!sgUm)py#0NNbTUuC-1~rYsZVR%C)+@(Lzwu;$cP}md&KX@XGYOK zJN^w%W`Ts9QKpvxAPAUzvtrlHqt66K|0K42b8rQ=D*y;)F!>_D<<^(xT?d0^C&15$ zh=_nv`fcZpR{+(g6dj_uFkIGdR)hg>WU1IuGwein0%Rqey@KU{b@LjyBm52+@uU@} z2fi1k>_F;@t2qEsv){R(qnH8H6ra7vtWa^GVjnwu;$J=-+v5X)B#T-Mo&&Rj5fCOO zCSaS@tSXUb5AGg!1t0wr)JtkS!dz(2yt!MimhGXeBgVjRN~rafk#LPZG&t|PXI}&z zjOh+Qq9CjLZ4&^>W<8pF5(I$=$)U{60P`CA?U&p-r$BB1A?6aiX-D}2MDfBB9Sz<1 z39vPS09*De*}yK%Oc#s?oE75j>A9W)x_anw#5&Wp=fYhf8toH%{|Y>tb&JPu{HZ6l zncTEAvJUXSUO-h8wY#9_d!qNQqz`R$j_+d_-vXM;->j7Lgs)#Owq+h#UoU4}&RhB` zkPjHP=yO6j&@wT~PB^oopWwf`E*|vYD; z3+XK#=qmR763?ud-ZQcf9Y+Oh#!k0QUHBKL0`Tv9o_gNBHQiUhil_;B3*Z+l3X)*3 zle0`rOzoQqVjOg>qJTTrzJSP#c5{rj8vlTowFn-YYSeLZWk~ywBDg$0TmQzau|T?3 z3|+eWsA_;pE-<*HFg9IVQ{%r3X4pt00Ai;I85E~$M66qP^GB?&(!sM2ukg$Y~E(_b87m@yIJtN%5k&d{VdSMhAiUkP+v`)&D<+w|>%s2g6! 
z@|X`$1t8xJSG1mBA_6F6wB}cUn7_IiTZ%v=S~uzduyal4@3ZhGa;hSNPYVp?L&b?z z7P_%S*89(fUHl5h1(lis1qgLH&7Pth>%sE_hZyWEP|y4?P+T6PVZn3nBYTcU(hVWN z#IK6GgZ)vE|9a1%n-%(-1l{`<^9T(9uSaVRv%y(?K=u{%BGA-g+Y`ymwpj%VE0zxy z=m2N6uUw~rFdKKz#>b}sfaG*%n(rCt5p~}xJ7)Wr+!`~86^|mJ!XpeCxn8piv7AzI zrymLsVwZaqe>8$ruh(>Obt$fH|Mf3T#{$r~PUs2#Eul}d18`Q#=F)0n;kOWl!0tAH z^KSd61cjXTs`_b>_HsYrSVBsxHR2Z1;=R111j!@B(Q!T<$&y3IPh0Lpwa35sN8`%w zcQ-xeUpC&TX+95<1>RdK%I)VPsK_fG`^ONptpI|}Jkf|->0ln)sR_~vHUlKnhyMBp z!R9vfA5Bhy4p3KSX6!^b%{nwUNOk?^foGkK{I%S089O{LU%#?~A=4>xp{IZ5Y3vKy z)4@lsv`bkBSMO^Vk8w8S`Iuo20O0)#zsZ+Z3n@sJ&!cnPkFKr#B-`9(p$}2gVfN_3 zC={LpOwfCDygY;QC3@CKHz3NRmoBy;+QD!8|NZa&u;fj&L@KOW7rE?oQ2yrcq+?y3 zu0h?pzhauWRJHf(hI#{$Vg8RhS^19-4gUMM;D7I=Lj4fKsYydg;n|!L`{uSo7C48~ z;rB}l5QFaa5WG+k38gp{5O=zCR2h;H9#UzB092dEPV@XCmufj5MToJ*PUiL2ad1Tf zL%#|a5@pOa3b+G5I6(veIKA;TNaKdPFj(puz~85EX7YST6db9~83_RVZzUoC{u%hR z;?tnLLhE;9h#WvW&K$odai;h}27!17bs6Lg#Y+c;s+^A`t$8Gq{AGX-eSlVG5KGk& z9~n!h$Xt{=I=#Wu8Jtwqs(ENbBgUYXY3uHi>2Y>aZTnRhgAt@LY`SlKUQQM?uS?c_ zdqWLp0-(p@K!Remy04ZEUh(J)KF*YD3>dPu4{R$MF(!%muJJs_3}cHw?4_0)#eYt1 zKEwcS@&p8j61DBK3}TxA{mYP5(5Cr)`!#!f;+*-HLi^^{;YEI{8iaFM+kg`!1uhO% zVd{5mz&KYc$+D{F8pp@CTK0JiF`8DgG^^tLb2?@FX7grevWWs_mY-&3vgZL&U!!or zbLskI)wb`H8ZbL_R5^1PCX-HvHH=V|B&Hw+fx-sPM5WEN(sFb_L2pBRm@DFwlUWi@ ztEjyeqL9SvRpV|L)uYIguaH#q=WJyx#q}}pk5sr672N$q`Lp1>mDcrV8l~xz9(clG zt1_%vRT$KWE%*G&H+4#V4vQQj`(-zCToyJd05TTaw0qNi<0aegnX!1^_Q9-mN*+k0 zTffZ}vXS3wE?2@5KJ@%ZSl(|y7Ly51!e%de&oMPF7QKflOQwr<6}0*}J(BnUuYnqK zb(1-wXf`m9T+t|t`m8%w+&*4=^=Vw|cKt&rQqJz4gsdE|65#skDpv+1iFYJqA+y}f z4A473|6>Hl6#PS%#^zH}fA{C(T#5RAjtRq=X1?C}Ftw*?&xO!^&zM4qd6 zoukNX8=oo}KG(G@uGs2rkcCPRhi>c@wfk)>9B<%}>nI0l7MA8On(O3Zq~(K$3Toz+ ztHn=;UCaTy1;FL{Ld54w##t!@yJ^`ws?(_wmW?+^f)}F)aL%D@6F&gZ+iC~gwRDj_ z!zXp(dyr+<|FAz)pR8=P6US~2z;((RxdEgel+VlzjRl4 zzDPRFkQ2vu@sMTxVEOlZWd?q`qRV5CeERiy7{^g(XEf|H>%&ve-;xnoe`$0l<@}Yx zupeTtq`EpaM%en!|LXIrei)2V`n^zN;k%HOq?=duugYC3n;6!g2zwk%kWnj46KcKi zhf1x|Sl~L5+&vY3v{D4W9l|{=Vk4dMCuuC6OuZQ0eWS 
zKR}d$!SLwV6Yh{fZ}Z418;`lxkeJ!)N5YI*p@$khGR~Y>U(>*9R@NI@;zFzxh%A_w z9#Un5F6^vckcDBWQfNYNAhA6e6Ym~=hw9cRqmwE1ZUf}9>5a<<^?@4MBCUhYK7Y<= zd6XS&!IMhZk~z=jNM#qYMnkmn2+`B?rjtme$k~WZz}#D^7Vuqmx7OR5OCS?*e)3&; z^|^4wUl6SLILpMeo|~OsNFMgd+zqZ^ zvh5rp*}AX|u9I#qUK^*rE%))`xWHhUSbc`JE6X$piHIBoYm>e*_15D6yZN3^xSk}7 zlbh)4IGxIQjXaX{3@){N41ScPgl>C_UR`=t+ZyaUv2%{l-;=&0_lODy4&3SD<^p$a zd{lHiBxTwb)bpI}se>c@gYN0pH!fLK+B?Hp;E}IW5MK66Rh}KenTjm6-0*AzL)k}o>eWyKDF%j53CZ7O zgObp}+{hQ2>BL}0hD2~mB|g6*&AiM&~Z9dCHk zLL7jp9$z#4x8$V%8ohMg+wm3{yhT5PfFwbc4yO@c;W2m)+41M7N zT0oKxOw!cCp z?gXsF+k)LAC}FY-8iiLQG;f-|Y%Dy{whql$j>F0lY+3i8eyVtc3mM52peza;8GreU z-2<`I_QG$SU8OPmj~|N8KDW_0(`8Y~br_y=zYlius!6Yc6PQh)JPfW=Sr!(@WeVKn zn(RB?m1<4Y7k$}$BV@b!pz_UV7^=oDWOj(pa{Y9r<~XH*NuE5ft5ytRWJ-#FzhTmzj?UkU<4$`OX%?H|OyI;m2*nM@MY|8F|M^ z`ws9lv5*k`=wZ@^jdVS$5`N?$V)ssVt&;M1Z>B?6J$z|1oFU1Xq{m;YQndd9hI|M= zq?0xjN)NXEXGJkyrG(htY#?`_q^21@BsJB;Ggt*Dm4Mxq$ID`GHW|yRus>g$?w&@0 zFX{1{94k|-XQffwapTcvq2eZ_Pv=pm`GS5O|Qwkcnm#iUlnlB@&{FveAf99lF@IK+~Ohv z(el~ze-;w@uT6KdLd&Yhq;Rj>COw;PYTn;@ojcxipb@E8h^k4Xb7^dEVjsKAtI9_; zxG_Id`xW{Kzf$13#fXxSlcwm1QrxyA`@T~QqpQl2&RB0JKdsnjWeqd_s$$!DI$lb7 zd8%V%2usq`f0JA_B#(DUso3l&gbok;InLwV|r2vnfOteg0TsnN#y{DM}Uq2E&`&Mr> zHmsA~!iy^=DYAsvW;|N77jZDJgtd5LHkI$BT#FIZvTVcbZI4-PZHm>Z$gM8A+jC`J zt%6YB9vF> @@ -19,22 +19,27 @@ allocated in each location. If the number of nodes in each location is unbalanced and there are a lot of replicas, replica shards might be left unassigned. +TIP: Learn more about <>. + [[enabling-awareness]] ===== Enabling shard allocation awareness To enable shard allocation awareness: -. Specify the location of each node with a custom node attribute. For example, -if you want Elasticsearch to distribute shards across different racks, you might -set an awareness attribute called `rack_id` in each node's `elasticsearch.yml` -config file. +. Specify the location of each node with a custom node attribute. 
For example,
+if you want Elasticsearch to distribute shards across different racks, you might
+use an awareness attribute called `rack_id`.
++
+You can set custom attributes in two ways:
+
+- By editing the `elasticsearch.yml` config file:
+
[source,yaml]
--------------------------------------------------------
node.attr.rack_id: rack_one
--------------------------------------------------------
+
-You can also set custom attributes when you start a node:
+- Using the `-E` command line argument when you start a node:
+
[source,sh]
--------------------------------------------------------
@@ -56,17 +61,33 @@ cluster.routing.allocation.awareness.attributes: rack_id <1>
+
You can also use the <> API to set or update
-a cluster's awareness attributes.
+a cluster's awareness attributes:
++
+[source,console]
+--------------------------------------------------
+PUT /_cluster/settings
+{
+ "persistent" : {
+ "cluster.routing.allocation.awareness.attributes" : "rack_id"
+ }
+}
+--------------------------------------------------

With this example configuration, if you start two nodes with
`node.attr.rack_id` set to `rack_one` and create an index with 5 primary
shards and 1 replica of each primary, all primaries and replicas are
-allocated across the two nodes.
+allocated across the two nodes.
+
+.All primaries and replicas allocated across two nodes in the same rack
+image::images/shard-allocation/shard-allocation-awareness-one-rack.png[All primaries and replicas are allocated across two nodes in the same rack]

If you add two nodes with `node.attr.rack_id` set to `rack_two`, {es}
moves shards to the new nodes, ensuring (if possible) that no two copies of
the same shard are in the same rack.
+.Primaries and replicas allocated across four nodes in two racks, with no two copies of the same shard in the same rack +image::images/shard-allocation/shard-allocation-awareness-two-racks.png[Primaries and replicas are allocated across four nodes in two racks with no two copies of the same shard in the same rack] + If `rack_two` fails and takes down both its nodes, by default {es} allocates the lost shard copies to nodes in `rack_one`. To prevent multiple copies of a particular shard from being allocated in the same location, you can From 91f9cf7290786cbb3e4634cca1a69834f61bf66a Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 8 May 2024 13:03:13 -0400 Subject: [PATCH 006/119] ESQL: Move a few more test out of IT_test_only (#108377) This moves the "skip" logic from our IT_test_only suffix into a new feature - this one is historical `esql.enrich_load`. This feature is not supported by `CsvTests` but is supported across all tests. --- ...g-IT_tests_only.csv-spec => blog.csv-spec} | 3 + .../resources/enrich-IT_tests_only.csv-spec | 350 --------------- .../src/main/resources/enrich.csv-spec | 409 +++++++++++++++++- .../xpack/esql/plugin/EsqlFeatures.java | 9 +- .../elasticsearch/xpack/esql/CsvTests.java | 1 + 5 files changed, 419 insertions(+), 353 deletions(-) rename x-pack/plugin/esql/qa/testFixtures/src/main/resources/{blog-IT_tests_only.csv-spec => blog.csv-spec} (88%) delete mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec similarity index 88% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec index 6ddc9601db4a..64c4641b2ca0 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec @@ -1,4 +1,7 @@ +# Examples that were published in a blog post + 2023-08-08.full-blown-query +required_feature: esql.enrich_load FROM employees | WHERE still_hired == true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec deleted file mode 100644 index 367fbf044dee..000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec +++ /dev/null @@ -1,350 +0,0 @@ -simple -row language_code = "1" -| enrich languages_policy -; - -language_code:keyword | language_name:keyword -1 | English -; - - -enrichOn -from employees | sort emp_no | limit 1 | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name; - -emp_no:integer | language_name:keyword -10001 | French -; - - -enrichOn2 -from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1 ; - -emp_no:integer | language_name:keyword -10001 | French -; - -simpleSortLimit -from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1; - -emp_no:integer | language_name:keyword -10001 | French -; - - -with -from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 1 -| enrich languages_policy on x with language_name; - -emp_no:integer | x:keyword | language_name:keyword -10001 | 2 | French -; - - -withAlias -from employees | sort emp_no | limit 3 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name; - -emp_no:integer | x:keyword | lang:keyword -10001 | 2 | French -10002 | 5 | null -10003 | 4 | German -; - - -withAliasSort -from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 3 -| enrich languages_policy on 
x with lang = language_name; - -emp_no:integer | x:keyword | lang:keyword -10001 | 2 | French -10002 | 5 | null -10003 | 4 | German -; - - -withAliasOverwriteName#[skip:-8.13.0] -from employees | sort emp_no -| eval x = to_string(languages) | enrich languages_policy on x with emp_no = language_name -| keep emp_no | limit 1 -; - -emp_no:keyword -French -; - - -withAliasAndPlain -from employees | sort emp_no desc | limit 3 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name, language_name; - -emp_no:integer | x:keyword | lang:keyword | language_name:keyword -10100 | 4 | German | German -10099 | 2 | French | French -10098 | 4 | German | German -; - - -withTwoAliasesSameProp -from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with lang = language_name, lang2 = language_name; - -emp_no:integer | x:keyword | lang:keyword | lang2:keyword -10001 | 2 | French | French -; - - -redundantWith -from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with language_name, language_name; - -emp_no:integer | x:keyword | language_name:keyword -10001 | 2 | French -; - - -nullInput -from employees | where emp_no == 10017 | keep emp_no, gender -| enrich languages_policy on gender with language_name, language_name; - -emp_no:integer | gender:keyword | language_name:keyword -10017 | null | null -; - - -constantNullInput -from employees | where emp_no == 10020 | eval x = to_string(languages) | keep emp_no, x -| enrich languages_policy on x with language_name, language_name; - -emp_no:integer | x:keyword | language_name:keyword -10020 | null | null -; - - -multipleEnrich -row a = "1", b = "2", c = "10" -| enrich languages_policy on a with a_lang = language_name -| enrich languages_policy on b with b_lang = language_name -| enrich languages_policy on c with c_lang = language_name; - -a:keyword | b:keyword | 
c:keyword | a_lang:keyword | b_lang:keyword | c_lang:keyword -1 | 2 | 10 | English | French | null -; - - -enrichEval -from employees | eval x = to_string(languages) -| enrich languages_policy on x with lang = language_name -| eval language = concat(x, "-", lang) -| keep emp_no, x, lang, language -| sort emp_no desc | limit 3; - -emp_no:integer | x:keyword | lang:keyword | language:keyword -10100 | 4 | German | 4-German -10099 | 2 | French | 2-French -10098 | 4 | German | 4-German -; - - -multivalue -required_feature: esql.mv_sort -row a = ["1", "2"] | enrich languages_policy on a with a_lang = language_name | eval a_lang = mv_sort(a_lang); - -a:keyword | a_lang:keyword -["1", "2"] | ["English", "French"] -; - - -enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0] -FROM sample_data -| ENRICH client_cidr_policy ON client_ip WITH env -| EVAL max_env = MV_MAX(env), count_env = MV_COUNT(env) -| KEEP client_ip, count_env, max_env -| SORT client_ip -; - -client_ip:ip | count_env:i | max_env:keyword -172.21.0.5 | 1 | Development -172.21.2.113 | 2 | QA -172.21.2.162 | 2 | QA -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -172.21.3.15 | 2 | Production -; - - -enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] -FROM sample_data -| ENRICH client_cidr_policy ON client_ip WITH env, client_cidr -| KEEP client_ip, env, client_cidr -| SORT client_ip -; - -client_ip:ip | env:keyword | client_cidr:ip_range -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.3.15 | [Development, Production] | 172.21.3.0/24 -172.21.0.5 | Development | 172.21.0.0/16 -172.21.2.113 | [Development, QA] | 172.21.2.0/24 -172.21.2.162 | [Development, QA] | 172.21.2.0/24 -; - - -enrichAgesStatsYear#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| WHERE birth_date > "1960-01-01" -| EVAL 
birth_year = DATE_EXTRACT("year", birth_date) -| EVAL age = 2022 - birth_year -| ENRICH ages_policy ON age WITH age_group = description -| STATS count=count(age_group) BY age_group, birth_year -| KEEP birth_year, age_group, count -| SORT birth_year DESC -; - -birth_year:long | age_group:keyword | count:long -1965 | Middle-aged | 1 -1964 | Middle-aged | 4 -1963 | Middle-aged | 7 -1962 | Senior | 6 -1961 | Senior | 8 -1960 | Senior | 8 -; - - -enrichAgesStatsAgeGroup#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| WHERE birth_date IS NOT NULL -| EVAL age = 2022 - DATE_EXTRACT("year", birth_date) -| ENRICH ages_policy ON age WITH age_group = description -| STATS count=count(age_group) BY age_group -| SORT count DESC -; - -count:long | age_group:keyword -78 | Senior -12 | Middle-aged -; - - -enrichHeightsStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| ENRICH heights_policy ON height WITH height_group = description -| STATS count=count(height_group), min=min(height), max=max(height) BY height_group -| KEEP height_group, min, max, count -| SORT min ASC -; - -height_group:k | min:double | max:double | count:long -Very Short | 1.41 | 1.48 | 9 -Short | 1.5 | 1.59 | 20 -Medium Height | 1.61 | 1.79 | 26 -Tall | 1.8 | 1.99 | 25 -Very Tall | 2.0 | 2.1 | 20 -; - - -enrichDecadesStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM employees -| ENRICH decades_policy ON birth_date WITH birth_decade = decade, birth_description = description -| ENRICH decades_policy ON hire_date WITH hire_decade = decade, hire_description = description -| STATS count=count(*) BY birth_decade, hire_decade, birth_description, hire_description -| KEEP birth_decade, hire_decade, birth_description, hire_description, count -| SORT birth_decade DESC, hire_decade DESC -; - -birth_decade:long | hire_decade:l | birth_description:k | hire_description:k | count:long -null | 1990 | null | Nineties Nostalgia | 6 -null | 1980 | null | Radical Eighties | 4 
-1960 | 1990 | Swinging Sixties | Nineties Nostalgia | 13 -1960 | 1980 | Swinging Sixties | Radical Eighties | 21 -1950 | 1990 | Nifty Fifties | Nineties Nostalgia | 22 -1950 | 1980 | Nifty Fifties | Radical Eighties | 34 -; - - -spatialEnrichmentKeywordMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "CPH" -| ENRICH city_names ON city WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length -; - -abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer -CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 -; - - -spatialEnrichmentGeoMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "CPH" -| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length -; - -abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer -CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 -; - - -spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.mv_warn - -FROM airports -| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| STATS city_centroid = ST_CENTROID_AGG(city_location), count = COUNT(city_location), min_wkt = MIN(boundary_wkt_length), max_wkt = 
MAX(boundary_wkt_length) -; -warning:Line 3:30: evaluation of [LENGTH(TO_STRING(city_boundary))] failed, treating result as null. Only first 20 failures recorded. -warning:Line 3:30: java.lang.IllegalArgumentException: single-value function encountered multi-value - -city_centroid:geo_point | count:long | min_wkt:integer | max_wkt:integer -POINT(1.396561 24.127649) | 872 | 88 | 1044 -; - - -spatialEnrichmentKeywordMatchAndSpatialPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] -FROM airports -| ENRICH city_names ON city WITH airport, region, city_boundary -| MV_EXPAND city_boundary -| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) -| STATS count=COUNT(*) BY airport_in_city -| SORT count ASC -; - -count:long | airport_in_city:boolean -114 | null -396 | true -455 | false -; - - -spatialEnrichmentKeywordMatchAndSpatialAggregation#[skip:-8.13.99, reason:st_intersects added in 8.14] -FROM airports -| ENRICH city_names ON city WITH airport, region, city_boundary -| MV_EXPAND city_boundary -| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) -| STATS count=COUNT(*), centroid=ST_CENTROID_AGG(location) BY airport_in_city -| SORT count ASC -; - -count:long | centroid:geo_point | airport_in_city:boolean -114 | POINT (-24.750062 31.575549) | null -396 | POINT (-2.534797 20.667712) | true -455 | POINT (3.090752 27.676442) | false -; - - -spatialEnrichmentTextMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -FROM airports -| WHERE abbrev == "IDR" -| ENRICH city_airports ON name WITH city_name = city, region, city_boundary -| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) -| KEEP abbrev, city_name, city_location, country, location, name, name, region, boundary_wkt_length -; - -abbrev:k | city_name:k | city_location:geo_point | country:k | location:geo_point | name:text | region:text | boundary_wkt_length:i -IDR | Indore | POINT(75.8472 22.7167) | India | POINT(75.8092915005895 22.727749187571) | Devi Ahilyabai Holkar 
Int'l | Indore City | 231 -; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index f5847260bbb1..e84e79748c17 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -1,10 +1,10 @@ -simple +simpleNoLoad from employees | eval x = 1, y = to_string(languages) | enrich languages_policy on y | where x > 1 | keep emp_no, language_name | limit 1; emp_no:integer | language_name:keyword ; -docsGettingStartedEnrich +docsGettingStartedEnrichNoLoad // tag::gs-enrich[] FROM sample_data | KEEP @timestamp, client_ip, event_duration @@ -30,3 +30,408 @@ FROM sample_data median_duration:double | env:keyword ; + +simple +required_feature: esql.enrich_load + +row language_code = "1" +| enrich languages_policy +; + +language_code:keyword | language_name:keyword +1 | English +; + + +enrichOn +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name; + +emp_no:integer | language_name:keyword +10001 | French +; + + +enrichOn2 +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1 ; + +emp_no:integer | language_name:keyword +10001 | French +; + + +simpleSortLimit +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1; + +emp_no:integer | language_name:keyword +10001 | French +; + +with +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 1 +| enrich languages_policy on x with language_name; + +emp_no:integer | x:keyword | language_name:keyword +10001 | 2 | French +; + + 
+withAlias +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 3 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name; + +emp_no:integer | x:keyword | lang:keyword +10001 | 2 | French +10002 | 5 | null +10003 | 4 | German +; + + +withAliasSort +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 3 +| enrich languages_policy on x with lang = language_name; + +emp_no:integer | x:keyword | lang:keyword +10001 | 2 | French +10002 | 5 | null +10003 | 4 | German +; + + +withAliasOverwriteName#[skip:-8.13.0] +required_feature: esql.enrich_load + +from employees | sort emp_no +| eval x = to_string(languages) | enrich languages_policy on x with emp_no = language_name +| keep emp_no | limit 1 +; + +emp_no:keyword +French +; + +withAliasAndPlain +required_feature: esql.enrich_load + +from employees | sort emp_no desc | limit 3 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name, language_name; + +emp_no:integer | x:keyword | lang:keyword | language_name:keyword +10100 | 4 | German | German +10099 | 2 | French | French +10098 | 4 | German | German +; + + +withTwoAliasesSameProp +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with lang = language_name, lang2 = language_name; + +emp_no:integer | x:keyword | lang:keyword | lang2:keyword +10001 | 2 | French | French +; + + +redundantWith +required_feature: esql.enrich_load + +from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with language_name, language_name; + +emp_no:integer | x:keyword | language_name:keyword +10001 | 2 | French +; + + +nullInput +required_feature: esql.enrich_load + +from employees | where emp_no == 10017 | keep emp_no, gender +| 
enrich languages_policy on gender with language_name, language_name; + +emp_no:integer | gender:keyword | language_name:keyword +10017 | null | null +; + + +constantNullInput +required_feature: esql.enrich_load + +from employees | where emp_no == 10020 | eval x = to_string(languages) | keep emp_no, x +| enrich languages_policy on x with language_name, language_name; + +emp_no:integer | x:keyword | language_name:keyword +10020 | null | null +; + + +multipleEnrich +required_feature: esql.enrich_load + +row a = "1", b = "2", c = "10" +| enrich languages_policy on a with a_lang = language_name +| enrich languages_policy on b with b_lang = language_name +| enrich languages_policy on c with c_lang = language_name; + +a:keyword | b:keyword | c:keyword | a_lang:keyword | b_lang:keyword | c_lang:keyword +1 | 2 | 10 | English | French | null +; + + +enrichEval +required_feature: esql.enrich_load + +from employees | eval x = to_string(languages) +| enrich languages_policy on x with lang = language_name +| eval language = concat(x, "-", lang) +| keep emp_no, x, lang, language +| sort emp_no desc | limit 3; + +emp_no:integer | x:keyword | lang:keyword | language:keyword +10100 | 4 | German | 4-German +10099 | 2 | French | 2-French +10098 | 4 | German | 4-German +; + + +multivalue +required_feature: esql.enrich_load +required_feature: esql.mv_sort + +row a = ["1", "2"] | enrich languages_policy on a with a_lang = language_name | eval a_lang = mv_sort(a_lang); + +a:keyword | a_lang:keyword +["1", "2"] | ["English", "French"] +; + + +enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0] +required_feature: esql.enrich_load + +FROM sample_data +| ENRICH client_cidr_policy ON client_ip WITH env +| EVAL max_env = MV_MAX(env), count_env = MV_COUNT(env) +| KEEP client_ip, count_env, max_env +| SORT client_ip +; + +client_ip:ip | count_env:i | max_env:keyword +172.21.0.5 | 1 | Development +172.21.2.113 | 2 | QA +172.21.2.162 | 2 | QA +172.21.3.15 | 2 | Production 
+172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +172.21.3.15 | 2 | Production +; + + +enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] +required_feature: esql.enrich_load + +FROM sample_data +| ENRICH client_cidr_policy ON client_ip WITH env, client_cidr +| KEEP client_ip, env, client_cidr +| SORT client_ip +; + +client_ip:ip | env:keyword | client_cidr:ip_range +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.3.15 | [Development, Production] | 172.21.3.0/24 +172.21.0.5 | Development | 172.21.0.0/16 +172.21.2.113 | [Development, QA] | 172.21.2.0/24 +172.21.2.162 | [Development, QA] | 172.21.2.0/24 +; + + +enrichAgesStatsYear#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| WHERE birth_date > "1960-01-01" +| EVAL birth_year = DATE_EXTRACT("year", birth_date) +| EVAL age = 2022 - birth_year +| ENRICH ages_policy ON age WITH age_group = description +| STATS count=count(age_group) BY age_group, birth_year +| KEEP birth_year, age_group, count +| SORT birth_year DESC +; + +birth_year:long | age_group:keyword | count:long +1965 | Middle-aged | 1 +1964 | Middle-aged | 4 +1963 | Middle-aged | 7 +1962 | Senior | 6 +1961 | Senior | 8 +1960 | Senior | 8 +; + + +enrichAgesStatsAgeGroup#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| WHERE birth_date IS NOT NULL +| EVAL age = 2022 - DATE_EXTRACT("year", birth_date) +| ENRICH ages_policy ON age WITH age_group = description +| STATS count=count(age_group) BY age_group +| SORT count DESC +; + +count:long | age_group:keyword +78 | Senior +12 | Middle-aged +; + + +enrichHeightsStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| ENRICH heights_policy ON height WITH height_group = description +| STATS 
count=count(height_group), min=min(height), max=max(height) BY height_group +| KEEP height_group, min, max, count +| SORT min ASC +; + +height_group:k | min:double | max:double | count:long +Very Short | 1.41 | 1.48 | 9 +Short | 1.5 | 1.59 | 20 +Medium Height | 1.61 | 1.79 | 26 +Tall | 1.8 | 1.99 | 25 +Very Tall | 2.0 | 2.1 | 20 +; + + +enrichDecadesStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM employees +| ENRICH decades_policy ON birth_date WITH birth_decade = decade, birth_description = description +| ENRICH decades_policy ON hire_date WITH hire_decade = decade, hire_description = description +| STATS count=count(*) BY birth_decade, hire_decade, birth_description, hire_description +| KEEP birth_decade, hire_decade, birth_description, hire_description, count +| SORT birth_decade DESC, hire_decade DESC +; + +birth_decade:long | hire_decade:l | birth_description:k | hire_description:k | count:long +null | 1990 | null | Nineties Nostalgia | 6 +null | 1980 | null | Radical Eighties | 4 +1960 | 1990 | Swinging Sixties | Nineties Nostalgia | 13 +1960 | 1980 | Swinging Sixties | Radical Eighties | 21 +1950 | 1990 | Nifty Fifties | Nineties Nostalgia | 22 +1950 | 1980 | Nifty Fifties | Radical Eighties | 34 +; + + +spatialEnrichmentKeywordMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM airports +| WHERE abbrev == "CPH" +| ENRICH city_names ON city WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer +CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 +; + + 
+spatialEnrichmentGeoMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM airports +| WHERE abbrev == "CPH" +| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city, city_location, country, location, name, airport, region, boundary_wkt_length +; + +abbrev:keyword | city:keyword | city_location:geo_point | country:keyword | location:geo_point | name:text | airport:text | region:text | boundary_wkt_length:integer +CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark | POINT(12.6493508684508 55.6285017221528) | Copenhagen | Copenhagen | Københavns Kommune | 265 +; + + +spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load +required_feature: esql.mv_warn + +FROM airports +| ENRICH city_boundaries ON city_location WITH airport, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| STATS city_centroid = ST_CENTROID_AGG(city_location), count = COUNT(city_location), min_wkt = MIN(boundary_wkt_length), max_wkt = MAX(boundary_wkt_length) +; +warning:Line 3:30: evaluation of [LENGTH(TO_STRING(city_boundary))] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 3:30: java.lang.IllegalArgumentException: single-value function encountered multi-value + +city_centroid:geo_point | count:long | min_wkt:integer | max_wkt:integer +POINT(1.396561 24.127649) | 872 | 88 | 1044 +; + + +spatialEnrichmentKeywordMatchAndSpatialPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] +required_feature: esql.enrich_load + +FROM airports +| ENRICH city_names ON city WITH airport, region, city_boundary +| MV_EXPAND city_boundary +| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) +| STATS count=COUNT(*) BY airport_in_city +| SORT count ASC +; + +count:long | airport_in_city:boolean +114 | null +396 | true +455 | false +; + + +spatialEnrichmentKeywordMatchAndSpatialAggregation#[skip:-8.13.99, reason:st_intersects added in 8.14] +required_feature: esql.enrich_load + +FROM airports +| ENRICH city_names ON city WITH airport, region, city_boundary +| MV_EXPAND city_boundary +| EVAL airport_in_city = ST_INTERSECTS(location, city_boundary) +| STATS count=COUNT(*), centroid=ST_CENTROID_AGG(location) BY airport_in_city +| SORT count ASC +; + +count:long | centroid:geo_point | airport_in_city:boolean +114 | POINT (-24.750062 31.575549) | null +396 | POINT (-2.534797 20.667712) | true +455 | POINT (3.090752 27.676442) | false +; + + +spatialEnrichmentTextMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] +required_feature: esql.enrich_load + +FROM airports +| WHERE abbrev == "IDR" +| ENRICH city_airports ON name WITH city_name = city, region, city_boundary +| EVAL boundary_wkt_length = LENGTH(TO_STRING(city_boundary)) +| KEEP abbrev, city_name, city_location, country, location, name, name, region, boundary_wkt_length +; + +abbrev:k | city_name:k | city_location:geo_point | country:k | location:geo_point | name:text | region:text | boundary_wkt_length:i +IDR | Indore | POINT(75.8472 22.7167) | India | POINT(75.8092915005895 22.727749187571) | Devi Ahilyabai Holkar Int'l | Indore City | 231 +; diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 059eec771efe..4f852264193b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -136,6 +136,12 @@ public class EsqlFeatures implements FeatureSpecification { */ public static final NodeFeature METADATA_FIELDS = new NodeFeature("esql.metadata_fields"); + /** + * Support for loading values over enrich. This is supported by all versions of ESQL but not + * the unit test CsvTests. + */ + public static final NodeFeature ENRICH_LOAD = new NodeFeature("esql.enrich_load"); + /** * Support for timespan units abbreviations */ @@ -174,7 +180,8 @@ public Map getHistoricalFeatures() { Map.entry(MV_WARN, Version.V_8_12_0), Map.entry(SPATIAL_POINTS, Version.V_8_12_0), Map.entry(CONVERT_WARN, Version.V_8_12_0), - Map.entry(POW_DOUBLE, Version.V_8_12_0) + Map.entry(POW_DOUBLE, Version.V_8_12_0), + Map.entry(ENRICH_LOAD, Version.V_8_12_0) ); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index c865b21723a9..3539138e670e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -224,6 +224,7 @@ public final void test() throws Throwable { * are tested in integration tests. 
*/ assumeFalse("metadata fields aren't supported", testCase.requiredFeatures.contains(EsqlFeatures.METADATA_FIELDS.id())); + assumeFalse("enrich can't load fields in csv tests", testCase.requiredFeatures.contains(EsqlFeatures.ENRICH_LOAD.id())); doTest(); } catch (Throwable th) { throw reworkException(th); From 87df295817c95c09e0df27fa4da32346b83850b7 Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Wed, 8 May 2024 13:34:56 -0400 Subject: [PATCH 007/119] Brief document blurb about RestClient (#107863) --- docs/internal/DistributedArchitectureGuide.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/internal/DistributedArchitectureGuide.md b/docs/internal/DistributedArchitectureGuide.md index b8fb92b1ea15..7f10a1b3a8ca 100644 --- a/docs/internal/DistributedArchitectureGuide.md +++ b/docs/internal/DistributedArchitectureGuide.md @@ -133,6 +133,14 @@ are only used for internode operations/communications. ### Work Queues +### RestClient + +The `RestClient` is primarily used in testing, to send requests against cluster nodes in the same format as would users. There +are some uses of `RestClient`, via `RestClientBuilder`, in the production code. For example, remote reindex leverages the +`RestClient` internally as the REST client to the remote elasticsearch cluster, and to take advantage of the compatibility of +`RestClient` requests with much older elasticsearch versions. The `RestClient` is also used externally by the `Java API Client` +to communicate with Elasticsearch. + # Cluster Coordination (Sketch of important classes? Might inform more sections to add for details.) From 5a622b0a0719b050f6485fad9385135322d3a720 Mon Sep 17 00:00:00 2001 From: Andrew Wilkins Date: Thu, 9 May 2024 02:06:59 +0800 Subject: [PATCH 008/119] nativeaccess: try to load all located libsystemds (#108238) Linux systems with multiarch (e.g. i386 & x86_64) libraries may have libsystemd.0 in two subdirectories of an entry in java.library.path. 
For example, libsystemd.so.0 may be found in both /usr/lib/i386-linux-gnu and /usr/lib/x86_64-linux-gnu. Instead of attempting to load any library found, attempt all and stop as soon as one is successfully loaded. --- docs/changelog/108238.yaml | 6 ++ .../nativeaccess/jdk/JdkSystemdLibrary.java | 61 +++++++++++++------ 2 files changed, 47 insertions(+), 20 deletions(-) create mode 100644 docs/changelog/108238.yaml diff --git a/docs/changelog/108238.yaml b/docs/changelog/108238.yaml new file mode 100644 index 000000000000..607979c2eb0a --- /dev/null +++ b/docs/changelog/108238.yaml @@ -0,0 +1,6 @@ +pr: 108238 +summary: "Nativeaccess: try to load all located libsystemds" +area: Infra/Core +type: bug +issues: + - 107878 diff --git a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java index 5313984ac6d6..0af87154960a 100644 --- a/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java +++ b/libs/native/src/main21/java/org/elasticsearch/nativeaccess/jdk/JdkSystemdLibrary.java @@ -17,7 +17,10 @@ import java.lang.foreign.MemorySegment; import java.lang.invoke.MethodHandle; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; +import java.util.Arrays; +import java.util.List; import static java.lang.foreign.ValueLayout.ADDRESS; import static java.lang.foreign.ValueLayout.JAVA_INT; @@ -26,31 +29,49 @@ class JdkSystemdLibrary implements SystemdLibrary { static { - System.load(findLibSystemd()); - } - - // On some systems libsystemd does not have a non-versioned symlink. System.loadLibrary only knows how to find - // non-versioned library files. So we must manually check the library path to find what we need. 
- static String findLibSystemd() { - final String libsystemd = "libsystemd.so.0"; - String libpath = System.getProperty("java.library.path"); - for (String basepathStr : libpath.split(":")) { - var basepath = Paths.get(basepathStr); - if (Files.exists(basepath) == false) { - continue; + // Find and load libsystemd. We attempt all instances of + // libsystemd in case of multiarch systems, and stop when + // one is successfully loaded. If none can be loaded, + // UnsatisfiedLinkError will be thrown. + List paths = findLibSystemd(); + if (paths.isEmpty()) { + String libpath = System.getProperty("java.library.path"); + throw new UnsatisfiedLinkError("Could not find libsystemd in java.library.path: " + libpath); + } + UnsatisfiedLinkError last = null; + for (String path : paths) { + try { + System.load(path); + last = null; + break; + } catch (UnsatisfiedLinkError e) { + last = e; } - try (var stream = Files.walk(basepath)) { + } + if (last != null) { + throw last; + } + } - var foundpath = stream.filter(Files::isDirectory).map(p -> p.resolve(libsystemd)).filter(Files::exists).findAny(); - if (foundpath.isPresent()) { - return foundpath.get().toAbsolutePath().toString(); - } + // findLibSystemd returns a list of paths to instances of libsystemd + // found within java.library.path. + static List findLibSystemd() { + // Note: on some systems libsystemd does not have a non-versioned symlink. + // System.loadLibrary only knows how to find non-versioned library files, + // so we must manually check the library path to find what we need. 
+ final Path libsystemd = Paths.get("libsystemd.so.0"); + final String libpath = System.getProperty("java.library.path"); + return Arrays.stream(libpath.split(":")).map(Paths::get).filter(Files::exists).flatMap(p -> { + try { + return Files.find( + p, + Integer.MAX_VALUE, + (fp, attrs) -> (attrs.isDirectory() == false && fp.getFileName().equals(libsystemd)) + ); } catch (IOException e) { throw new UncheckedIOException(e); } - - } - throw new UnsatisfiedLinkError("Could not find " + libsystemd + " in java.library.path: " + libpath); + }).map(p -> p.toAbsolutePath().toString()).toList(); } private static final MethodHandle sd_notify$mh = downcallHandle("sd_notify", FunctionDescriptor.of(JAVA_INT, JAVA_INT, ADDRESS)); From ab40808044f582ac76dd859aa2ba30cc9b6bc790 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 8 May 2024 11:32:33 -0700 Subject: [PATCH 009/119] Exchange should wait for remote sinks (#108337) Today, we do not wait for remote sinks to stop before completing the main request. While this doesn't affect correctness, it's important that we do not spawn child requests after the parent request is completed. 
Closes #105859 --- .../exchange/ExchangeSourceHandler.java | 24 +++++++++++++++---- .../exchange/ExchangeServiceTests.java | 16 ++++++++++++- .../xpack/esql/plugin/ComputeService.java | 3 +++ 3 files changed, 38 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index f1698ea401d2..adce8d8a8840 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.compute.data.Page; @@ -17,6 +18,7 @@ import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.transport.TransportException; +import java.util.List; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -89,6 +91,20 @@ public int bufferSize() { } } + public void addCompletionListener(ActionListener listener) { + buffer.addCompletionListener(ActionListener.running(() -> { + try (RefCountingListener refs = new RefCountingListener(listener)) { + for (PendingInstances pending : List.of(outstandingSinks, outstandingSources)) { + // Create an outstanding instance and then finish to complete the completionListener + // if we haven't registered any instances of exchange sinks or exchange sources before. 
+ pending.trackNewInstance(); + pending.completion.addListener(refs.acquire()); + pending.finishInstance(); + } + } + })); + } + /** * Create a new {@link ExchangeSource} for exchanging data * @@ -253,10 +269,10 @@ public Releasable addEmptySink() { private static class PendingInstances { private final AtomicInteger instances = new AtomicInteger(); - private final Releasable onComplete; + private final SubscribableListener completion = new SubscribableListener<>(); - PendingInstances(Releasable onComplete) { - this.onComplete = onComplete; + PendingInstances(Runnable onComplete) { + completion.addListener(ActionListener.running(onComplete)); } void trackNewInstance() { @@ -268,7 +284,7 @@ void finishInstance() { int refs = instances.decrementAndGet(); assert refs >= 0; if (refs == 0) { - onComplete.close(); + completion.onResponse(null); } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index bdaa045633dc..51332b3c8997 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -55,6 +55,7 @@ import java.util.Collections; import java.util.List; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.function.Supplier; @@ -94,6 +95,8 @@ public void testBasic() throws Exception { ExchangeSink sink1 = sinkExchanger.createExchangeSink(); ExchangeSink sink2 = sinkExchanger.createExchangeSink(); ExchangeSourceHandler sourceExchanger = new ExchangeSourceHandler(3, threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture sourceCompletion = new PlainActionFuture<>(); + 
sourceExchanger.addCompletionListener(sourceCompletion); ExchangeSource source = sourceExchanger.createExchangeSource(); sourceExchanger.addRemoteSink(sinkExchanger::fetchPageAsync, 1); SubscribableListener waitForReading = source.waitForReading(); @@ -133,7 +136,9 @@ public void testBasic() throws Exception { sink2.finish(); assertTrue(sink2.isFinished()); assertTrue(source.isFinished()); + assertFalse(sourceCompletion.isDone()); source.finish(); + sourceCompletion.actionGet(10, TimeUnit.SECONDS); ESTestCase.terminate(threadPool); for (Page page : pages) { page.releaseBlocks(); @@ -320,7 +325,9 @@ protected void start(Driver driver, ActionListener listener) { public void testConcurrentWithHandlers() { BlockFactory blockFactory = blockFactory(); + PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); var sourceExchanger = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); + sourceExchanger.addCompletionListener(sourceCompletionFuture); List sinkHandlers = new ArrayList<>(); Supplier exchangeSink = () -> { final ExchangeSinkHandler sinkHandler; @@ -336,6 +343,7 @@ public void testConcurrentWithHandlers() { final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ? 
-1 : randomIntBetween(0, 50_000); runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceExchanger::createExchangeSource, exchangeSink); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } public void testEarlyTerminate() { @@ -358,7 +366,7 @@ public void testEarlyTerminate() { assertTrue(sink.isFinished()); } - public void testConcurrentWithTransportActions() throws Exception { + public void testConcurrentWithTransportActions() { MockTransportService node0 = newTransportService(); ExchangeService exchange0 = new ExchangeService(Settings.EMPTY, threadPool, ESQL_TEST_EXECUTOR, blockFactory()); exchange0.registerTransportHandler(node0); @@ -371,12 +379,15 @@ public void testConcurrentWithTransportActions() throws Exception { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); var sourceHandler = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); + sourceHandler.addCompletionListener(sourceCompletionFuture); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomExchangeBuffer()); Transport.Connection connection = node0.getConnection(node1.getLocalNode()); sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5)); final int maxInputSeqNo = rarely() ? -1 : randomIntBetween(0, 50_000); final int maxOutputSeqNo = rarely() ? 
-1 : randomIntBetween(0, 50_000); runConcurrentTest(maxInputSeqNo, maxOutputSeqNo, sourceHandler::createExchangeSource, sinkHandler::createExchangeSink); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } } @@ -427,6 +438,8 @@ public void sendResponse(TransportResponse transportResponse) { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); var sourceHandler = new ExchangeSourceHandler(randomIntBetween(1, 128), threadPool.executor(ESQL_TEST_EXECUTOR)); + PlainActionFuture sourceCompletionFuture = new PlainActionFuture<>(); + sourceHandler.addCompletionListener(sourceCompletionFuture); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomIntBetween(1, 128)); Transport.Connection connection = node0.getConnection(node1.getLocalDiscoNode()); sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, connection), randomIntBetween(1, 5)); @@ -438,6 +451,7 @@ public void sendResponse(TransportResponse transportResponse) { assertNotNull(cause); assertThat(cause.getMessage(), equalTo("page is too large")); sinkHandler.onFailure(new RuntimeException(cause)); + sourceCompletionFuture.actionGet(10, TimeUnit.SECONDS); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index 7b38197dde95..d9005d5997b3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -205,6 +205,7 @@ public void execute( RefCountingListener refs = new RefCountingListener(listener.map(unused -> new Result(collectedPages, collectedProfiles))) ) { // run compute on the coordinator + exchangeSource.addCompletionListener(refs.acquire()); runCompute( rootTask, new ComputeContext(sessionId, 
RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, List.of(), configuration, exchangeSource, null), @@ -722,6 +723,7 @@ private void runComputeOnDataNode( var externalSink = exchangeService.getSinkHandler(externalId); task.addListener(() -> exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled()))); var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor); + exchangeSource.addCompletionListener(refs.acquire()); exchangeSource.addRemoteSink(internalSink::fetchPageAsync, 1); ActionListener reductionListener = cancelOnFailure(task, cancelled, refs.acquire()); runCompute( @@ -854,6 +856,7 @@ void runComputeOnRemoteCluster( RefCountingListener refs = new RefCountingListener(listener.map(unused -> new ComputeResponse(collectedProfiles))) ) { exchangeSink.addCompletionListener(refs.acquire()); + exchangeSource.addCompletionListener(refs.acquire()); PhysicalPlan coordinatorPlan = new ExchangeSinkExec( plan.source(), plan.output(), From ef8b6107aa2d3b29ebadef40f4a1a81e9fef0553 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Wed, 8 May 2024 14:07:25 -0600 Subject: [PATCH 010/119] Move kibana reporting data stream settings into component template (#107581) Previously these were contained in the index template, however, Kibana needs to be able to make overrides to only the settings, so factoring these out would allow them to do this (in such a way that they can be overridden by the `kibana-reporting@custom` component template as well). 
Relates to #97765 --- .../main/resources/kibana-reporting@settings.json | 14 ++++++++++++++ .../main/resources/kibana-reporting@template.json | 6 +----- .../xpack/stack/StackTemplateRegistry.java | 8 ++++++++ .../xpack/stack/StackTemplateRegistryTests.java | 5 +++++ 4 files changed, 28 insertions(+), 5 deletions(-) create mode 100644 x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json new file mode 100644 index 000000000000..933d7681c92e --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@settings.json @@ -0,0 +1,14 @@ +{ + "template": { + "settings": { + "number_of_shards": 1, + "auto_expand_replicas": "0-1" + } + }, + "_meta": { + "description": "default kibana reporting settings installed by elasticsearch", + "managed": true + }, + "version": ${xpack.stack.template.version}, + "deprecated": ${xpack.stack.template.deprecated} +} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json index 9c4da646c339..240ad36199fe 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json @@ -5,14 +5,10 @@ "hidden": true }, "allow_auto_create": true, - "composed_of": ["kibana-reporting@custom"], + "composed_of": ["kibana-reporting@settings", "kibana-reporting@custom"], "ignore_missing_component_templates": ["kibana-reporting@custom"], "template": { "lifecycle": {}, - "settings": { - "number_of_shards": 1, - "auto_expand_replicas": "0-1" - }, "mappings": { "properties": { "meta": { diff --git 
a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index 4fdb2d05c532..30323a1d7d36 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -107,6 +107,7 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { // Kibana reporting template /////////////////////////////////// public static final String KIBANA_REPORTING_INDEX_TEMPLATE_NAME = ".kibana-reporting"; + public static final String KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME = "kibana-reporting@settings"; public StackTemplateRegistry( Settings nodeSettings, @@ -229,6 +230,13 @@ protected List getLifecyclePolicies() { REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE, ADDITIONAL_TEMPLATE_VARIABLES + ), + new IndexTemplateConfig( + KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, + "/kibana-reporting@settings.json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE, + ADDITIONAL_TEMPLATE_VARIABLES ) )) { try { diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java index 782fe3b41ae3..abb2d5765b12 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java @@ -429,6 +429,7 @@ public void testSameOrHigherVersionTemplateNotUpgraded() { versions.put(StackTemplateRegistry.METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); versions.put(StackTemplateRegistry.SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); 
versions.put(StackTemplateRegistry.SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); + versions.put(StackTemplateRegistry.KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION); ClusterChangedEvent sameVersionEvent = createClusterChangedEvent(versions, nodes); client.setVerifier((action, request, listener) -> { if (action instanceof PutComponentTemplateAction) { @@ -484,6 +485,10 @@ public void testSameOrHigherVersionTemplateNotUpgraded() { StackTemplateRegistry.SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, StackTemplateRegistry.REGISTRY_VERSION + randomIntBetween(1, 1000) ); + versions.put( + StackTemplateRegistry.KIBANA_REPORTING_COMPONENT_TEMPLATE_NAME, + StackTemplateRegistry.REGISTRY_VERSION + randomIntBetween(1, 1000) + ); ClusterChangedEvent higherVersionEvent = createClusterChangedEvent(versions, nodes); registry.clusterChanged(higherVersionEvent); } From 5b9dd3dda04820589df495ed68605d2fd7b2caf2 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 8 May 2024 14:02:26 -0700 Subject: [PATCH 011/119] Mock empty state task in file settings tests (#108100) When the file watched by file settings is initially missing, a special method in reserved state service is called to write a dummy cluster state entry. In the case of tests, there is no real running master service, so when the task is submitted, the file watcher thread actually barfs and the watcher dies, silently. That then causes the test to timeout as it waits indefinitely but the file watcher is no longer watching for the test file that was written. This commit mocks out writing this empty state in the reserved state service. It also collapses the two tests that check stopping while blocked in processing works since they were almost exactly the same. 
closes #106968 --- .../service/FileSettingsServiceTests.java | 58 +++---------------- 1 file changed, 7 insertions(+), 51 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 53ca55f8a5f8..aca5d2cbee2c 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -8,8 +8,8 @@ package org.elasticsearch.reservedstate.service; -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -55,7 +55,6 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/106968") public class FileSettingsServiceTests extends ESTestCase { private Environment env; private ClusterService clusterService; @@ -234,54 +233,11 @@ public void testStopWorksInMiddleOfProcessing() throws Exception { return new ReservedStateChunk(Collections.emptyMap(), new ReservedStateVersion(1L, Version.CURRENT)); }).when(spiedController).parse(any(String.class), any()); - service.start(); - service.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); - assertTrue(service.watching()); - - Files.createDirectories(service.watchedFileDir()); - - // Make some fake settings file to cause the file settings service to process it - writeTestFile(service.watchedFile(), "{}"); - - // we need to wait a bit, on MacOS it may take up to 10 seconds for the Java watcher service to notice the file, - // on Linux is instantaneous. 
Windows is instantaneous too. - assertTrue(processFileLatch.await(30, TimeUnit.SECONDS)); - - // Stopping the service should interrupt the watcher thread, we should be able to stop - service.stop(); - assertFalse(service.watching()); - service.close(); - // let the deadlocked thread end, so we can cleanly exit the test - deadThreadLatch.countDown(); - } - - public void testStopWorksIfProcessingDidntReturnYet() throws Exception { - var spiedController = spy(controller); - var service = new FileSettingsService(clusterService, spiedController, env); - - CountDownLatch processFileLatch = new CountDownLatch(1); - CountDownLatch deadThreadLatch = new CountDownLatch(1); - - doAnswer((Answer) invocation -> { - // allow the other thread to continue, but hold on a bit to avoid - // completing the task immediately in the main watcher loop - try { - Thread.sleep(1_000); - } catch (InterruptedException e) { - // pass it on - Thread.currentThread().interrupt(); - } - processFileLatch.countDown(); - new Thread(() -> { - // Simulate a thread that never allows the completion to complete - try { - deadThreadLatch.await(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }).start(); - return new ReservedStateChunk(Collections.emptyMap(), new ReservedStateVersion(1L, Version.CURRENT)); - }).when(spiedController).parse(any(String.class), any()); + doAnswer((Answer) invocation -> { + var completionListener = invocation.getArgument(1, ActionListener.class); + completionListener.onResponse(null); + return null; + }).when(spiedController).initEmpty(any(String.class), any()); service.start(); service.clusterChanged(new ClusterChangedEvent("test", clusterService.state(), ClusterState.EMPTY_STATE)); @@ -296,7 +252,7 @@ public void testStopWorksIfProcessingDidntReturnYet() throws Exception { // on Linux is instantaneous. Windows is instantaneous too. 
assertTrue(processFileLatch.await(30, TimeUnit.SECONDS)); - // Stopping the service should interrupt the watcher thread, allowing the whole thing to exit + // Stopping the service should interrupt the watcher thread, we should be able to stop service.stop(); assertFalse(service.watching()); service.close(); From 90b238e9d80ef39bd9786df40655f08c58bb5c0a Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Wed, 8 May 2024 15:28:25 -0600 Subject: [PATCH 012/119] Mention alias filters don't apply for get-by-id in docs (#108433) Resolves #3861 --- docs/reference/alias.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/alias.asciidoc b/docs/reference/alias.asciidoc index e5c2db65778d..9d784f530d63 100644 --- a/docs/reference/alias.asciidoc +++ b/docs/reference/alias.asciidoc @@ -358,6 +358,8 @@ POST _aliases ---- // TEST[s/^/PUT my-index-2099.05.06-000001\n/] +NOTE: Filters are only applied when using the <>, and are not applied when <>. + [discrete] [[alias-routing]] === Routing From 31abf3e00c119bd807d69574c9fb18dba4b6d9bb Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 8 May 2024 15:38:38 -0700 Subject: [PATCH 013/119] Make MockLogAppender threadsafe (#108206) Adding and removing appenders in Log4j is not threadsafe. Yet some tests rely on capturing logging by adding an in memory appender, MockLogAppender. This commit makes the mock logging threadsafe by creating a new, singular appender for mock logging that delegates, in a threadsafe way, to the existing appenders created. Confusingly MockLogAppender is no longer really an appender, but I'm leaving clarifying that for a followup so as to limit the scope of this PR. 
closes #106425 --- .../bootstrap/SpawnerNoBootstrapTests.java | 1 + .../common/settings/SettingsFilterTests.java | 1 - .../org/elasticsearch/test/ESTestCase.java | 1 + .../elasticsearch/test/MockLogAppender.java | 84 +++++++++++++------ .../test/MockLogAppenderTests.java | 38 +++++++++ ...LoadAuthorizedIndicesTimeCheckerTests.java | 2 - 6 files changed, 100 insertions(+), 27 deletions(-) create mode 100644 test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index 81b3a086e9ac..c4aa3c9b1f1e 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -64,6 +64,7 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { static { // normally done by ESTestCase, but need here because spawner depends on logging LogConfigurator.loadLog4jPlugins(); + MockLogAppender.init(); } static class ExpectedStreamMessage implements MockLogAppender.LoggingExpectation { diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java index 4885bbc277cb..8e62a9306a3d 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsFilterTests.java @@ -119,7 +119,6 @@ private void assertExpectedLogMessages(Consumer consumer, MockLogAppende Logger testLogger = LogManager.getLogger("org.elasticsearch.test"); MockLogAppender appender = new MockLogAppender(); try (var ignored = appender.capturing("org.elasticsearch.test")) { - appender.start(); Arrays.stream(expectations).forEach(appender::addExpectation); 
consumer.accept(testLogger); appender.assertAllExpectationsMatched(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 804dbfbb2dc4..83f7fdfe386c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -260,6 +260,7 @@ public static void resetPortCounter() { // TODO: consolidate logging initialization for tests so it all occurs in logconfigurator LogConfigurator.loadLog4jPlugins(); LogConfigurator.configureESLogging(); + MockLogAppender.init(); final List testAppenders = new ArrayList<>(3); for (String leakLoggerName : Arrays.asList("io.netty.util.ResourceLeakDetector", LeakTracker.class.getName())) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java index 10a3a8a78e48..bc3723119afa 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.appender.AbstractAppender; import org.apache.logging.log4j.core.config.Property; @@ -19,9 +18,10 @@ import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.regex.Pattern; import static org.hamcrest.CoreMatchers.equalTo; @@ -31,12 +31,38 @@ /** * Test appender that can be used to verify that certain events were logged correctly */ -public class MockLogAppender extends AbstractAppender { 
+public class MockLogAppender { + private static final Map> mockAppenders = new ConcurrentHashMap<>(); + private static final RealMockAppender parent = new RealMockAppender(); private final List expectations; + private volatile boolean isAlive = true; + + private static class RealMockAppender extends AbstractAppender { + + RealMockAppender() { + super("mock", null, null, false, Property.EMPTY_ARRAY); + } + + @Override + public void append(LogEvent event) { + List appenders = mockAppenders.get(event.getLoggerName()); + if (appenders == null) { + // check if there is a root appender + appenders = mockAppenders.getOrDefault("", List.of()); + } + for (MockLogAppender appender : appenders) { + if (appender.isAlive == false) { + continue; + } + for (LoggingExpectation expectation : appender.expectations) { + expectation.match(event); + } + } + } + } public MockLogAppender() { - super("mock", null, null, false, Property.EMPTY_ARRAY); /* * We use a copy-on-write array list since log messages could be appended while we are setting up expectations. When that occurs, * we would run into a concurrent modification exception from the iteration over the expectations in #append, concurrent with a @@ -45,15 +71,16 @@ public MockLogAppender() { expectations = new CopyOnWriteArrayList<>(); } - public void addExpectation(LoggingExpectation expectation) { - expectations.add(new WrappedLoggingExpectation(expectation)); + /** + * Initialize the mock log appender with the log4j system. 
+ */ + public static void init() { + parent.start(); + Loggers.addAppender(LogManager.getLogger(""), parent); } - @Override - public void append(LogEvent event) { - for (LoggingExpectation expectation : expectations) { - expectation.match(event); - } + public void addExpectation(LoggingExpectation expectation) { + expectations.add(new WrappedLoggingExpectation(expectation)); } public void assertAllExpectationsMatched() { @@ -213,7 +240,7 @@ public void assertMatched() { */ private static class WrappedLoggingExpectation implements LoggingExpectation { - private final AtomicBoolean assertMatchedCalled = new AtomicBoolean(false); + private volatile boolean assertMatchedCalled = false; private final LoggingExpectation delegate; private WrappedLoggingExpectation(LoggingExpectation delegate) { @@ -230,7 +257,7 @@ public void assertMatched() { try { delegate.assertMatched(); } finally { - assertMatchedCalled.set(true); + assertMatchedCalled = true; } } @@ -243,34 +270,43 @@ public String toString() { /** * Adds the list of class loggers to this {@link MockLogAppender}. * - * Stops ({@link #stop()}) and runs some checks on the {@link MockLogAppender} once the returned object is released. + * Stops and runs some checks on the {@link MockLogAppender} once the returned object is released. */ public Releasable capturing(Class... classes) { - return appendToLoggers(Arrays.stream(classes).map(LogManager::getLogger).toList()); + return appendToLoggers(Arrays.stream(classes).map(Class::getCanonicalName).toList()); } /** * Same as above except takes string class names of each logger. */ public Releasable capturing(String... 
names) { - return appendToLoggers(Arrays.stream(names).map(LogManager::getLogger).toList()); + return appendToLoggers(Arrays.asList(names)); } - private Releasable appendToLoggers(List loggers) { - start(); - for (final var logger : loggers) { - Loggers.addAppender(logger, this); + private Releasable appendToLoggers(List loggers) { + for (String logger : loggers) { + mockAppenders.compute(logger, (k, v) -> { + if (v == null) { + v = new CopyOnWriteArrayList<>(); + } + v.add(this); + return v; + }); } return () -> { - for (final var logger : loggers) { - Loggers.removeAppender(logger, this); + isAlive = false; + for (String logger : loggers) { + mockAppenders.compute(logger, (k, v) -> { + assert v != null; + v.remove(this); + return v.isEmpty() ? null : v; + }); } - stop(); // check that all expectations have been evaluated before this is released for (WrappedLoggingExpectation expectation : expectations) { assertThat( "Method assertMatched() not called on LoggingExpectation instance before release: " + expectation, - expectation.assertMatchedCalled.get(), + expectation.assertMatchedCalled, is(true) ); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java b/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java new file mode 100644 index 000000000000..4973bb83311b --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/test/MockLogAppenderTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.test; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.util.concurrent.atomic.AtomicBoolean; + +public class MockLogAppenderTests extends ESTestCase { + + public void testConcurrentLogAndLifecycle() throws Exception { + Logger logger = LogManager.getLogger(MockLogAppenderTests.class); + final var keepGoing = new AtomicBoolean(true); + final var logThread = new Thread(() -> { + while (keepGoing.get()) { + logger.info("test"); + } + }); + logThread.start(); + + final var appender = new MockLogAppender(); + for (int i = 0; i < 1000; i++) { + try (var ignored = appender.capturing(MockLogAppenderTests.class)) { + Thread.yield(); + } + } + + keepGoing.set(false); + logThread.join(); + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java index e06f6f212c68..8295f028588c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/LoadAuthorizedIndicesTimeCheckerTests.java @@ -12,7 +12,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; @@ -195,7 +194,6 @@ private void testLogging( ); final MockLogAppender mockAppender = new MockLogAppender(); try (var ignored = mockAppender.capturing(timerLogger.getName())) { - Loggers.addAppender(timerLogger, mockAppender); mockAppender.addExpectation(expectation); 
checker.accept(List.of()); mockAppender.assertAllExpectationsMatched(); From e56ed71ef8beb973c039e52ed2790c8dc119af22 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Wed, 8 May 2024 17:13:46 -0700 Subject: [PATCH 014/119] ESQL: Disable quoting in FROM command (#108431) Disable location quoting in FROM command before 8.14 release to allow more time to discuss options --- docs/changelog/108431.yaml | 5 + .../esql/src/main/antlr/EsqlBaseLexer.g4 | 4 - .../esql/src/main/antlr/EsqlBaseParser.g4 | 1 - .../xpack/esql/parser/EsqlBaseLexer.interp | 3 +- .../xpack/esql/parser/EsqlBaseLexer.java | 1426 ++++++++--------- .../xpack/esql/parser/EsqlBaseParser.interp | 2 +- .../xpack/esql/parser/EsqlBaseParser.java | 144 +- .../xpack/esql/parser/IdentifierBuilder.java | 2 +- .../esql/parser/StatementParserTests.java | 14 +- 9 files changed, 793 insertions(+), 808 deletions(-) create mode 100644 docs/changelog/108431.yaml diff --git a/docs/changelog/108431.yaml b/docs/changelog/108431.yaml new file mode 100644 index 000000000000..84607b1b99ac --- /dev/null +++ b/docs/changelog/108431.yaml @@ -0,0 +1,5 @@ +pr: 108431 +summary: "ESQL: Disable quoting in FROM command" +area: ES|QL +type: bug +issues: [] diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index c4a3dc7c5661..f16afa86199f 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -201,10 +201,6 @@ FROM_UNQUOTED_IDENTIFIER : FROM_UNQUOTED_IDENTIFIER_PART+ ; -FROM_QUOTED_IDENTIFIER - : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) - ; - FROM_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN) ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 62dcc6ebd484..e30bc8359594 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -109,7 +109,6 @@ fromCommand 
fromIdentifier : FROM_UNQUOTED_IDENTIFIER - | QUOTED_IDENTIFIER ; fromOptions diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 866093ef55a6..d6ad79586fa7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -319,7 +319,6 @@ OPTIONS METADATA FROM_UNQUOTED_IDENTIFIER_PART FROM_UNQUOTED_IDENTIFIER -FROM_QUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS @@ -405,4 +404,4 @@ META_MODE SETTING_MODE atn: -[4, 0, 110, 1203, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 
7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 
15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 484, 8, 18, 11, 18, 12, 18, 485, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 494, 8, 19, 10, 19, 12, 19, 497, 9, 19, 1, 19, 3, 19, 500, 8, 19, 1, 19, 3, 19, 503, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 512, 8, 20, 10, 20, 12, 20, 515, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 523, 8, 21, 11, 21, 12, 21, 524, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 566, 8, 32, 1, 32, 4, 32, 569, 8, 32, 11, 32, 12, 32, 570, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 580, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 587, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 592, 8, 38, 10, 38, 12, 38, 595, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 603, 8, 38, 10, 38, 12, 38, 606, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 613, 8, 38, 1, 38, 3, 38, 616, 8, 38, 3, 38, 618, 8, 38, 1, 39, 4, 39, 621, 8, 39, 11, 39, 12, 39, 622, 1, 40, 4, 40, 626, 8, 40, 11, 40, 12, 40, 627, 1, 40, 1, 40, 5, 40, 632, 8, 40, 10, 40, 12, 40, 635, 9, 40, 1, 40, 1, 40, 4, 40, 639, 8, 40, 11, 40, 12, 40, 640, 1, 40, 4, 40, 644, 8, 40, 11, 40, 12, 40, 645, 1, 40, 1, 40, 5, 40, 650, 8, 40, 10, 40, 12, 40, 653, 9, 40, 3, 40, 655, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 661, 8, 40, 11, 40, 12, 40, 662, 1, 40, 1, 40, 3, 40, 667, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 
1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 5, 78, 798, 8, 78, 10, 78, 12, 78, 801, 9, 78, 1, 78, 1, 78, 3, 78, 805, 8, 78, 1, 78, 4, 78, 808, 8, 78, 11, 78, 12, 78, 809, 3, 78, 812, 8, 78, 1, 79, 1, 79, 4, 79, 816, 8, 79, 11, 79, 12, 79, 817, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 3, 92, 881, 8, 92, 1, 93, 4, 93, 884, 8, 93, 11, 93, 12, 93, 885, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 3, 101, 921, 8, 101, 1, 102, 1, 102, 3, 102, 925, 8, 102, 1, 102, 5, 102, 928, 8, 102, 10, 102, 12, 102, 931, 9, 102, 1, 102, 1, 102, 3, 102, 935, 8, 102, 1, 102, 4, 102, 938, 8, 102, 11, 102, 12, 102, 939, 3, 102, 942, 8, 102, 1, 103, 1, 103, 4, 103, 946, 8, 103, 11, 103, 12, 103, 947, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 
110, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 121, 4, 121, 1023, 8, 121, 11, 121, 12, 121, 1024, 1, 121, 1, 121, 3, 121, 1029, 8, 121, 1, 121, 4, 121, 1032, 8, 121, 11, 121, 12, 121, 1033, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 4, 156, 1188, 8, 156, 11, 156, 12, 156, 1189, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 2, 513, 604, 0, 160, 12, 1, 14, 2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 15, 42, 16, 44, 
17, 46, 18, 48, 19, 50, 20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 0, 172, 68, 174, 69, 176, 70, 178, 71, 180, 0, 182, 0, 184, 0, 186, 0, 188, 0, 190, 0, 192, 72, 194, 73, 196, 0, 198, 74, 200, 0, 202, 75, 204, 76, 206, 77, 208, 0, 210, 0, 212, 0, 214, 0, 216, 0, 218, 78, 220, 79, 222, 80, 224, 81, 226, 0, 228, 0, 230, 0, 232, 0, 234, 82, 236, 0, 238, 83, 240, 84, 242, 85, 244, 0, 246, 0, 248, 86, 250, 87, 252, 0, 254, 88, 256, 0, 258, 0, 260, 89, 262, 90, 264, 91, 266, 0, 268, 0, 270, 0, 272, 0, 274, 0, 276, 0, 278, 0, 280, 92, 282, 93, 284, 94, 286, 0, 288, 0, 290, 0, 292, 0, 294, 95, 296, 96, 298, 97, 300, 0, 302, 98, 304, 99, 306, 100, 308, 101, 310, 0, 312, 102, 314, 103, 316, 104, 318, 105, 320, 0, 322, 106, 324, 107, 326, 108, 328, 109, 330, 110, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1230, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 
0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 4, 208, 1, 0, 0, 0, 4, 210, 1, 0, 0, 0, 4, 212, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 4, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 5, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 250, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 
0, 7, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 7, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 8, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 9, 308, 1, 0, 0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 10, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 11, 330, 1, 0, 0, 0, 12, 332, 1, 0, 0, 0, 14, 342, 1, 0, 0, 0, 16, 349, 1, 0, 0, 0, 18, 358, 1, 0, 0, 0, 20, 365, 1, 0, 0, 0, 22, 375, 1, 0, 0, 0, 24, 382, 1, 0, 0, 0, 26, 389, 1, 0, 0, 0, 28, 403, 1, 0, 0, 0, 30, 410, 1, 0, 0, 0, 32, 418, 1, 0, 0, 0, 34, 425, 1, 0, 0, 0, 36, 437, 1, 0, 0, 0, 38, 446, 1, 0, 0, 0, 40, 452, 1, 0, 0, 0, 42, 459, 1, 0, 0, 0, 44, 466, 1, 0, 0, 0, 46, 474, 1, 0, 0, 0, 48, 483, 1, 0, 0, 0, 50, 489, 1, 0, 0, 0, 52, 506, 1, 0, 0, 0, 54, 522, 1, 0, 0, 0, 56, 528, 1, 0, 0, 0, 58, 533, 1, 0, 0, 0, 60, 538, 1, 0, 0, 0, 62, 542, 1, 0, 0, 0, 64, 546, 1, 0, 0, 0, 66, 550, 1, 0, 0, 0, 68, 554, 1, 0, 0, 0, 70, 556, 1, 0, 0, 0, 72, 558, 1, 0, 0, 0, 74, 561, 1, 0, 0, 0, 76, 563, 1, 0, 0, 0, 78, 572, 1, 0, 0, 0, 80, 574, 1, 0, 0, 0, 82, 579, 1, 0, 0, 0, 84, 581, 1, 0, 0, 0, 86, 586, 1, 0, 0, 0, 88, 617, 1, 0, 0, 0, 90, 620, 1, 0, 0, 0, 92, 666, 1, 0, 0, 0, 94, 668, 1, 0, 0, 0, 96, 671, 1, 0, 0, 0, 98, 675, 1, 0, 0, 0, 100, 679, 1, 0, 0, 0, 102, 681, 1, 0, 0, 0, 104, 684, 1, 0, 0, 0, 106, 686, 1, 0, 0, 0, 108, 691, 1, 0, 0, 0, 110, 693, 1, 0, 0, 0, 112, 699, 1, 0, 0, 0, 114, 705, 1, 0, 0, 0, 116, 710, 1, 0, 0, 0, 118, 712, 1, 0, 0, 0, 120, 715, 1, 0, 0, 0, 122, 718, 1, 0, 0, 0, 124, 723, 1, 0, 0, 0, 126, 727, 1, 0, 0, 0, 128, 732, 1, 0, 0, 0, 130, 738, 1, 0, 0, 0, 132, 741, 1, 0, 0, 0, 134, 743, 1, 0, 0, 0, 136, 749, 1, 0, 0, 0, 
138, 751, 1, 0, 0, 0, 140, 756, 1, 0, 0, 0, 142, 759, 1, 0, 0, 0, 144, 762, 1, 0, 0, 0, 146, 765, 1, 0, 0, 0, 148, 767, 1, 0, 0, 0, 150, 770, 1, 0, 0, 0, 152, 772, 1, 0, 0, 0, 154, 775, 1, 0, 0, 0, 156, 777, 1, 0, 0, 0, 158, 779, 1, 0, 0, 0, 160, 781, 1, 0, 0, 0, 162, 783, 1, 0, 0, 0, 164, 785, 1, 0, 0, 0, 166, 790, 1, 0, 0, 0, 168, 811, 1, 0, 0, 0, 170, 813, 1, 0, 0, 0, 172, 821, 1, 0, 0, 0, 174, 823, 1, 0, 0, 0, 176, 827, 1, 0, 0, 0, 178, 831, 1, 0, 0, 0, 180, 835, 1, 0, 0, 0, 182, 840, 1, 0, 0, 0, 184, 844, 1, 0, 0, 0, 186, 848, 1, 0, 0, 0, 188, 852, 1, 0, 0, 0, 190, 856, 1, 0, 0, 0, 192, 860, 1, 0, 0, 0, 194, 868, 1, 0, 0, 0, 196, 880, 1, 0, 0, 0, 198, 883, 1, 0, 0, 0, 200, 887, 1, 0, 0, 0, 202, 891, 1, 0, 0, 0, 204, 895, 1, 0, 0, 0, 206, 899, 1, 0, 0, 0, 208, 903, 1, 0, 0, 0, 210, 908, 1, 0, 0, 0, 212, 912, 1, 0, 0, 0, 214, 920, 1, 0, 0, 0, 216, 941, 1, 0, 0, 0, 218, 945, 1, 0, 0, 0, 220, 949, 1, 0, 0, 0, 222, 953, 1, 0, 0, 0, 224, 957, 1, 0, 0, 0, 226, 961, 1, 0, 0, 0, 228, 966, 1, 0, 0, 0, 230, 970, 1, 0, 0, 0, 232, 974, 1, 0, 0, 0, 234, 978, 1, 0, 0, 0, 236, 981, 1, 0, 0, 0, 238, 985, 1, 0, 0, 0, 240, 989, 1, 0, 0, 0, 242, 993, 1, 0, 0, 0, 244, 997, 1, 0, 0, 0, 246, 1002, 1, 0, 0, 0, 248, 1007, 1, 0, 0, 0, 250, 1012, 1, 0, 0, 0, 252, 1019, 1, 0, 0, 0, 254, 1028, 1, 0, 0, 0, 256, 1035, 1, 0, 0, 0, 258, 1039, 1, 0, 0, 0, 260, 1043, 1, 0, 0, 0, 262, 1047, 1, 0, 0, 0, 264, 1051, 1, 0, 0, 0, 266, 1055, 1, 0, 0, 0, 268, 1061, 1, 0, 0, 0, 270, 1065, 1, 0, 0, 0, 272, 1069, 1, 0, 0, 0, 274, 1073, 1, 0, 0, 0, 276, 1077, 1, 0, 0, 0, 278, 1081, 1, 0, 0, 0, 280, 1085, 1, 0, 0, 0, 282, 1089, 1, 0, 0, 0, 284, 1093, 1, 0, 0, 0, 286, 1097, 1, 0, 0, 0, 288, 1102, 1, 0, 0, 0, 290, 1106, 1, 0, 0, 0, 292, 1110, 1, 0, 0, 0, 294, 1114, 1, 0, 0, 0, 296, 1118, 1, 0, 0, 0, 298, 1122, 1, 0, 0, 0, 300, 1126, 1, 0, 0, 0, 302, 1131, 1, 0, 0, 0, 304, 1136, 1, 0, 0, 0, 306, 1140, 1, 0, 0, 0, 308, 1144, 1, 0, 0, 0, 310, 1148, 1, 0, 0, 0, 312, 1153, 1, 0, 0, 0, 314, 1163, 1, 0, 0, 0, 316, 
1167, 1, 0, 0, 0, 318, 1171, 1, 0, 0, 0, 320, 1175, 1, 0, 0, 0, 322, 1180, 1, 0, 0, 0, 324, 1187, 1, 0, 0, 0, 326, 1191, 1, 0, 0, 0, 328, 1195, 1, 0, 0, 0, 330, 1199, 1, 0, 0, 0, 332, 333, 5, 100, 0, 0, 333, 334, 5, 105, 0, 0, 334, 335, 5, 115, 0, 0, 335, 336, 5, 115, 0, 0, 336, 337, 5, 101, 0, 0, 337, 338, 5, 99, 0, 0, 338, 339, 5, 116, 0, 0, 339, 340, 1, 0, 0, 0, 340, 341, 6, 0, 0, 0, 341, 13, 1, 0, 0, 0, 342, 343, 5, 100, 0, 0, 343, 344, 5, 114, 0, 0, 344, 345, 5, 111, 0, 0, 345, 346, 5, 112, 0, 0, 346, 347, 1, 0, 0, 0, 347, 348, 6, 1, 1, 0, 348, 15, 1, 0, 0, 0, 349, 350, 5, 101, 0, 0, 350, 351, 5, 110, 0, 0, 351, 352, 5, 114, 0, 0, 352, 353, 5, 105, 0, 0, 353, 354, 5, 99, 0, 0, 354, 355, 5, 104, 0, 0, 355, 356, 1, 0, 0, 0, 356, 357, 6, 2, 2, 0, 357, 17, 1, 0, 0, 0, 358, 359, 5, 101, 0, 0, 359, 360, 5, 118, 0, 0, 360, 361, 5, 97, 0, 0, 361, 362, 5, 108, 0, 0, 362, 363, 1, 0, 0, 0, 363, 364, 6, 3, 0, 0, 364, 19, 1, 0, 0, 0, 365, 366, 5, 101, 0, 0, 366, 367, 5, 120, 0, 0, 367, 368, 5, 112, 0, 0, 368, 369, 5, 108, 0, 0, 369, 370, 5, 97, 0, 0, 370, 371, 5, 105, 0, 0, 371, 372, 5, 110, 0, 0, 372, 373, 1, 0, 0, 0, 373, 374, 6, 4, 3, 0, 374, 21, 1, 0, 0, 0, 375, 376, 5, 102, 0, 0, 376, 377, 5, 114, 0, 0, 377, 378, 5, 111, 0, 0, 378, 379, 5, 109, 0, 0, 379, 380, 1, 0, 0, 0, 380, 381, 6, 5, 4, 0, 381, 23, 1, 0, 0, 0, 382, 383, 5, 103, 0, 0, 383, 384, 5, 114, 0, 0, 384, 385, 5, 111, 0, 0, 385, 386, 5, 107, 0, 0, 386, 387, 1, 0, 0, 0, 387, 388, 6, 6, 0, 0, 388, 25, 1, 0, 0, 0, 389, 390, 5, 105, 0, 0, 390, 391, 5, 110, 0, 0, 391, 392, 5, 108, 0, 0, 392, 393, 5, 105, 0, 0, 393, 394, 5, 110, 0, 0, 394, 395, 5, 101, 0, 0, 395, 396, 5, 115, 0, 0, 396, 397, 5, 116, 0, 0, 397, 398, 5, 97, 0, 0, 398, 399, 5, 116, 0, 0, 399, 400, 5, 115, 0, 0, 400, 401, 1, 0, 0, 0, 401, 402, 6, 7, 0, 0, 402, 27, 1, 0, 0, 0, 403, 404, 5, 107, 0, 0, 404, 405, 5, 101, 0, 0, 405, 406, 5, 101, 0, 0, 406, 407, 5, 112, 0, 0, 407, 408, 1, 0, 0, 0, 408, 409, 6, 8, 1, 0, 409, 29, 1, 0, 0, 0, 410, 411, 5, 
108, 0, 0, 411, 412, 5, 105, 0, 0, 412, 413, 5, 109, 0, 0, 413, 414, 5, 105, 0, 0, 414, 415, 5, 116, 0, 0, 415, 416, 1, 0, 0, 0, 416, 417, 6, 9, 0, 0, 417, 31, 1, 0, 0, 0, 418, 419, 5, 109, 0, 0, 419, 420, 5, 101, 0, 0, 420, 421, 5, 116, 0, 0, 421, 422, 5, 97, 0, 0, 422, 423, 1, 0, 0, 0, 423, 424, 6, 10, 5, 0, 424, 33, 1, 0, 0, 0, 425, 426, 5, 109, 0, 0, 426, 427, 5, 118, 0, 0, 427, 428, 5, 95, 0, 0, 428, 429, 5, 101, 0, 0, 429, 430, 5, 120, 0, 0, 430, 431, 5, 112, 0, 0, 431, 432, 5, 97, 0, 0, 432, 433, 5, 110, 0, 0, 433, 434, 5, 100, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 11, 6, 0, 436, 35, 1, 0, 0, 0, 437, 438, 5, 114, 0, 0, 438, 439, 5, 101, 0, 0, 439, 440, 5, 110, 0, 0, 440, 441, 5, 97, 0, 0, 441, 442, 5, 109, 0, 0, 442, 443, 5, 101, 0, 0, 443, 444, 1, 0, 0, 0, 444, 445, 6, 12, 7, 0, 445, 37, 1, 0, 0, 0, 446, 447, 5, 114, 0, 0, 447, 448, 5, 111, 0, 0, 448, 449, 5, 119, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 13, 0, 0, 451, 39, 1, 0, 0, 0, 452, 453, 5, 115, 0, 0, 453, 454, 5, 104, 0, 0, 454, 455, 5, 111, 0, 0, 455, 456, 5, 119, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 14, 8, 0, 458, 41, 1, 0, 0, 0, 459, 460, 5, 115, 0, 0, 460, 461, 5, 111, 0, 0, 461, 462, 5, 114, 0, 0, 462, 463, 5, 116, 0, 0, 463, 464, 1, 0, 0, 0, 464, 465, 6, 15, 0, 0, 465, 43, 1, 0, 0, 0, 466, 467, 5, 115, 0, 0, 467, 468, 5, 116, 0, 0, 468, 469, 5, 97, 0, 0, 469, 470, 5, 116, 0, 0, 470, 471, 5, 115, 0, 0, 471, 472, 1, 0, 0, 0, 472, 473, 6, 16, 0, 0, 473, 45, 1, 0, 0, 0, 474, 475, 5, 119, 0, 0, 475, 476, 5, 104, 0, 0, 476, 477, 5, 101, 0, 0, 477, 478, 5, 114, 0, 0, 478, 479, 5, 101, 0, 0, 479, 480, 1, 0, 0, 0, 480, 481, 6, 17, 0, 0, 481, 47, 1, 0, 0, 0, 482, 484, 8, 0, 0, 0, 483, 482, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 485, 486, 1, 0, 0, 0, 486, 487, 1, 0, 0, 0, 487, 488, 6, 18, 0, 0, 488, 49, 1, 0, 0, 0, 489, 490, 5, 47, 0, 0, 490, 491, 5, 47, 0, 0, 491, 495, 1, 0, 0, 0, 492, 494, 8, 1, 0, 0, 493, 492, 1, 0, 0, 0, 494, 497, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 495, 496, 1, 
0, 0, 0, 496, 499, 1, 0, 0, 0, 497, 495, 1, 0, 0, 0, 498, 500, 5, 13, 0, 0, 499, 498, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 502, 1, 0, 0, 0, 501, 503, 5, 10, 0, 0, 502, 501, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 19, 9, 0, 505, 51, 1, 0, 0, 0, 506, 507, 5, 47, 0, 0, 507, 508, 5, 42, 0, 0, 508, 513, 1, 0, 0, 0, 509, 512, 3, 52, 20, 0, 510, 512, 9, 0, 0, 0, 511, 509, 1, 0, 0, 0, 511, 510, 1, 0, 0, 0, 512, 515, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 514, 516, 1, 0, 0, 0, 515, 513, 1, 0, 0, 0, 516, 517, 5, 42, 0, 0, 517, 518, 5, 47, 0, 0, 518, 519, 1, 0, 0, 0, 519, 520, 6, 20, 9, 0, 520, 53, 1, 0, 0, 0, 521, 523, 7, 2, 0, 0, 522, 521, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 522, 1, 0, 0, 0, 524, 525, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 6, 21, 9, 0, 527, 55, 1, 0, 0, 0, 528, 529, 3, 164, 76, 0, 529, 530, 1, 0, 0, 0, 530, 531, 6, 22, 10, 0, 531, 532, 6, 22, 11, 0, 532, 57, 1, 0, 0, 0, 533, 534, 3, 66, 27, 0, 534, 535, 1, 0, 0, 0, 535, 536, 6, 23, 12, 0, 536, 537, 6, 23, 13, 0, 537, 59, 1, 0, 0, 0, 538, 539, 3, 54, 21, 0, 539, 540, 1, 0, 0, 0, 540, 541, 6, 24, 9, 0, 541, 61, 1, 0, 0, 0, 542, 543, 3, 50, 19, 0, 543, 544, 1, 0, 0, 0, 544, 545, 6, 25, 9, 0, 545, 63, 1, 0, 0, 0, 546, 547, 3, 52, 20, 0, 547, 548, 1, 0, 0, 0, 548, 549, 6, 26, 9, 0, 549, 65, 1, 0, 0, 0, 550, 551, 5, 124, 0, 0, 551, 552, 1, 0, 0, 0, 552, 553, 6, 27, 13, 0, 553, 67, 1, 0, 0, 0, 554, 555, 7, 3, 0, 0, 555, 69, 1, 0, 0, 0, 556, 557, 7, 4, 0, 0, 557, 71, 1, 0, 0, 0, 558, 559, 5, 92, 0, 0, 559, 560, 7, 5, 0, 0, 560, 73, 1, 0, 0, 0, 561, 562, 8, 6, 0, 0, 562, 75, 1, 0, 0, 0, 563, 565, 7, 7, 0, 0, 564, 566, 7, 8, 0, 0, 565, 564, 1, 0, 0, 0, 565, 566, 1, 0, 0, 0, 566, 568, 1, 0, 0, 0, 567, 569, 3, 68, 28, 0, 568, 567, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 568, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 77, 1, 0, 0, 0, 572, 573, 5, 64, 0, 0, 573, 79, 1, 0, 0, 0, 574, 575, 5, 96, 0, 0, 575, 81, 1, 0, 0, 0, 576, 580, 8, 9, 0, 0, 577, 578, 5, 96, 0, 0, 578, 
580, 5, 96, 0, 0, 579, 576, 1, 0, 0, 0, 579, 577, 1, 0, 0, 0, 580, 83, 1, 0, 0, 0, 581, 582, 5, 95, 0, 0, 582, 85, 1, 0, 0, 0, 583, 587, 3, 70, 29, 0, 584, 587, 3, 68, 28, 0, 585, 587, 3, 84, 36, 0, 586, 583, 1, 0, 0, 0, 586, 584, 1, 0, 0, 0, 586, 585, 1, 0, 0, 0, 587, 87, 1, 0, 0, 0, 588, 593, 5, 34, 0, 0, 589, 592, 3, 72, 30, 0, 590, 592, 3, 74, 31, 0, 591, 589, 1, 0, 0, 0, 591, 590, 1, 0, 0, 0, 592, 595, 1, 0, 0, 0, 593, 591, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 596, 1, 0, 0, 0, 595, 593, 1, 0, 0, 0, 596, 618, 5, 34, 0, 0, 597, 598, 5, 34, 0, 0, 598, 599, 5, 34, 0, 0, 599, 600, 5, 34, 0, 0, 600, 604, 1, 0, 0, 0, 601, 603, 8, 1, 0, 0, 602, 601, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 607, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 608, 5, 34, 0, 0, 608, 609, 5, 34, 0, 0, 609, 610, 5, 34, 0, 0, 610, 612, 1, 0, 0, 0, 611, 613, 5, 34, 0, 0, 612, 611, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 615, 1, 0, 0, 0, 614, 616, 5, 34, 0, 0, 615, 614, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 618, 1, 0, 0, 0, 617, 588, 1, 0, 0, 0, 617, 597, 1, 0, 0, 0, 618, 89, 1, 0, 0, 0, 619, 621, 3, 68, 28, 0, 620, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 620, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 91, 1, 0, 0, 0, 624, 626, 3, 68, 28, 0, 625, 624, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 625, 1, 0, 0, 0, 627, 628, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 633, 3, 108, 48, 0, 630, 632, 3, 68, 28, 0, 631, 630, 1, 0, 0, 0, 632, 635, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 667, 1, 0, 0, 0, 635, 633, 1, 0, 0, 0, 636, 638, 3, 108, 48, 0, 637, 639, 3, 68, 28, 0, 638, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 638, 1, 0, 0, 0, 640, 641, 1, 0, 0, 0, 641, 667, 1, 0, 0, 0, 642, 644, 3, 68, 28, 0, 643, 642, 1, 0, 0, 0, 644, 645, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 654, 1, 0, 0, 0, 647, 651, 3, 108, 48, 0, 648, 650, 3, 68, 28, 0, 649, 648, 1, 0, 0, 0, 650, 653, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 651, 652, 1, 0, 0, 0, 652, 
655, 1, 0, 0, 0, 653, 651, 1, 0, 0, 0, 654, 647, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 657, 3, 76, 32, 0, 657, 667, 1, 0, 0, 0, 658, 660, 3, 108, 48, 0, 659, 661, 3, 68, 28, 0, 660, 659, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 660, 1, 0, 0, 0, 662, 663, 1, 0, 0, 0, 663, 664, 1, 0, 0, 0, 664, 665, 3, 76, 32, 0, 665, 667, 1, 0, 0, 0, 666, 625, 1, 0, 0, 0, 666, 636, 1, 0, 0, 0, 666, 643, 1, 0, 0, 0, 666, 658, 1, 0, 0, 0, 667, 93, 1, 0, 0, 0, 668, 669, 5, 98, 0, 0, 669, 670, 5, 121, 0, 0, 670, 95, 1, 0, 0, 0, 671, 672, 5, 97, 0, 0, 672, 673, 5, 110, 0, 0, 673, 674, 5, 100, 0, 0, 674, 97, 1, 0, 0, 0, 675, 676, 5, 97, 0, 0, 676, 677, 5, 115, 0, 0, 677, 678, 5, 99, 0, 0, 678, 99, 1, 0, 0, 0, 679, 680, 5, 61, 0, 0, 680, 101, 1, 0, 0, 0, 681, 682, 5, 58, 0, 0, 682, 683, 5, 58, 0, 0, 683, 103, 1, 0, 0, 0, 684, 685, 5, 44, 0, 0, 685, 105, 1, 0, 0, 0, 686, 687, 5, 100, 0, 0, 687, 688, 5, 101, 0, 0, 688, 689, 5, 115, 0, 0, 689, 690, 5, 99, 0, 0, 690, 107, 1, 0, 0, 0, 691, 692, 5, 46, 0, 0, 692, 109, 1, 0, 0, 0, 693, 694, 5, 102, 0, 0, 694, 695, 5, 97, 0, 0, 695, 696, 5, 108, 0, 0, 696, 697, 5, 115, 0, 0, 697, 698, 5, 101, 0, 0, 698, 111, 1, 0, 0, 0, 699, 700, 5, 102, 0, 0, 700, 701, 5, 105, 0, 0, 701, 702, 5, 114, 0, 0, 702, 703, 5, 115, 0, 0, 703, 704, 5, 116, 0, 0, 704, 113, 1, 0, 0, 0, 705, 706, 5, 108, 0, 0, 706, 707, 5, 97, 0, 0, 707, 708, 5, 115, 0, 0, 708, 709, 5, 116, 0, 0, 709, 115, 1, 0, 0, 0, 710, 711, 5, 40, 0, 0, 711, 117, 1, 0, 0, 0, 712, 713, 5, 105, 0, 0, 713, 714, 5, 110, 0, 0, 714, 119, 1, 0, 0, 0, 715, 716, 5, 105, 0, 0, 716, 717, 5, 115, 0, 0, 717, 121, 1, 0, 0, 0, 718, 719, 5, 108, 0, 0, 719, 720, 5, 105, 0, 0, 720, 721, 5, 107, 0, 0, 721, 722, 5, 101, 0, 0, 722, 123, 1, 0, 0, 0, 723, 724, 5, 110, 0, 0, 724, 725, 5, 111, 0, 0, 725, 726, 5, 116, 0, 0, 726, 125, 1, 0, 0, 0, 727, 728, 5, 110, 0, 0, 728, 729, 5, 117, 0, 0, 729, 730, 5, 108, 0, 0, 730, 731, 5, 108, 0, 0, 731, 127, 1, 0, 0, 0, 732, 733, 5, 110, 0, 0, 733, 734, 5, 117, 0, 0, 
734, 735, 5, 108, 0, 0, 735, 736, 5, 108, 0, 0, 736, 737, 5, 115, 0, 0, 737, 129, 1, 0, 0, 0, 738, 739, 5, 111, 0, 0, 739, 740, 5, 114, 0, 0, 740, 131, 1, 0, 0, 0, 741, 742, 5, 63, 0, 0, 742, 133, 1, 0, 0, 0, 743, 744, 5, 114, 0, 0, 744, 745, 5, 108, 0, 0, 745, 746, 5, 105, 0, 0, 746, 747, 5, 107, 0, 0, 747, 748, 5, 101, 0, 0, 748, 135, 1, 0, 0, 0, 749, 750, 5, 41, 0, 0, 750, 137, 1, 0, 0, 0, 751, 752, 5, 116, 0, 0, 752, 753, 5, 114, 0, 0, 753, 754, 5, 117, 0, 0, 754, 755, 5, 101, 0, 0, 755, 139, 1, 0, 0, 0, 756, 757, 5, 61, 0, 0, 757, 758, 5, 61, 0, 0, 758, 141, 1, 0, 0, 0, 759, 760, 5, 61, 0, 0, 760, 761, 5, 126, 0, 0, 761, 143, 1, 0, 0, 0, 762, 763, 5, 33, 0, 0, 763, 764, 5, 61, 0, 0, 764, 145, 1, 0, 0, 0, 765, 766, 5, 60, 0, 0, 766, 147, 1, 0, 0, 0, 767, 768, 5, 60, 0, 0, 768, 769, 5, 61, 0, 0, 769, 149, 1, 0, 0, 0, 770, 771, 5, 62, 0, 0, 771, 151, 1, 0, 0, 0, 772, 773, 5, 62, 0, 0, 773, 774, 5, 61, 0, 0, 774, 153, 1, 0, 0, 0, 775, 776, 5, 43, 0, 0, 776, 155, 1, 0, 0, 0, 777, 778, 5, 45, 0, 0, 778, 157, 1, 0, 0, 0, 779, 780, 5, 42, 0, 0, 780, 159, 1, 0, 0, 0, 781, 782, 5, 47, 0, 0, 782, 161, 1, 0, 0, 0, 783, 784, 5, 37, 0, 0, 784, 163, 1, 0, 0, 0, 785, 786, 5, 91, 0, 0, 786, 787, 1, 0, 0, 0, 787, 788, 6, 76, 0, 0, 788, 789, 6, 76, 0, 0, 789, 165, 1, 0, 0, 0, 790, 791, 5, 93, 0, 0, 791, 792, 1, 0, 0, 0, 792, 793, 6, 77, 13, 0, 793, 794, 6, 77, 13, 0, 794, 167, 1, 0, 0, 0, 795, 799, 3, 70, 29, 0, 796, 798, 3, 86, 37, 0, 797, 796, 1, 0, 0, 0, 798, 801, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 812, 1, 0, 0, 0, 801, 799, 1, 0, 0, 0, 802, 805, 3, 84, 36, 0, 803, 805, 3, 78, 33, 0, 804, 802, 1, 0, 0, 0, 804, 803, 1, 0, 0, 0, 805, 807, 1, 0, 0, 0, 806, 808, 3, 86, 37, 0, 807, 806, 1, 0, 0, 0, 808, 809, 1, 0, 0, 0, 809, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 812, 1, 0, 0, 0, 811, 795, 1, 0, 0, 0, 811, 804, 1, 0, 0, 0, 812, 169, 1, 0, 0, 0, 813, 815, 3, 80, 34, 0, 814, 816, 3, 82, 35, 0, 815, 814, 1, 0, 0, 0, 816, 817, 1, 0, 0, 0, 817, 815, 1, 0, 
0, 0, 817, 818, 1, 0, 0, 0, 818, 819, 1, 0, 0, 0, 819, 820, 3, 80, 34, 0, 820, 171, 1, 0, 0, 0, 821, 822, 3, 170, 79, 0, 822, 173, 1, 0, 0, 0, 823, 824, 3, 50, 19, 0, 824, 825, 1, 0, 0, 0, 825, 826, 6, 81, 9, 0, 826, 175, 1, 0, 0, 0, 827, 828, 3, 52, 20, 0, 828, 829, 1, 0, 0, 0, 829, 830, 6, 82, 9, 0, 830, 177, 1, 0, 0, 0, 831, 832, 3, 54, 21, 0, 832, 833, 1, 0, 0, 0, 833, 834, 6, 83, 9, 0, 834, 179, 1, 0, 0, 0, 835, 836, 3, 66, 27, 0, 836, 837, 1, 0, 0, 0, 837, 838, 6, 84, 12, 0, 838, 839, 6, 84, 13, 0, 839, 181, 1, 0, 0, 0, 840, 841, 3, 164, 76, 0, 841, 842, 1, 0, 0, 0, 842, 843, 6, 85, 10, 0, 843, 183, 1, 0, 0, 0, 844, 845, 3, 166, 77, 0, 845, 846, 1, 0, 0, 0, 846, 847, 6, 86, 14, 0, 847, 185, 1, 0, 0, 0, 848, 849, 3, 104, 46, 0, 849, 850, 1, 0, 0, 0, 850, 851, 6, 87, 15, 0, 851, 187, 1, 0, 0, 0, 852, 853, 3, 100, 44, 0, 853, 854, 1, 0, 0, 0, 854, 855, 6, 88, 16, 0, 855, 189, 1, 0, 0, 0, 856, 857, 3, 88, 38, 0, 857, 858, 1, 0, 0, 0, 858, 859, 6, 89, 17, 0, 859, 191, 1, 0, 0, 0, 860, 861, 5, 111, 0, 0, 861, 862, 5, 112, 0, 0, 862, 863, 5, 116, 0, 0, 863, 864, 5, 105, 0, 0, 864, 865, 5, 111, 0, 0, 865, 866, 5, 110, 0, 0, 866, 867, 5, 115, 0, 0, 867, 193, 1, 0, 0, 0, 868, 869, 5, 109, 0, 0, 869, 870, 5, 101, 0, 0, 870, 871, 5, 116, 0, 0, 871, 872, 5, 97, 0, 0, 872, 873, 5, 100, 0, 0, 873, 874, 5, 97, 0, 0, 874, 875, 5, 116, 0, 0, 875, 876, 5, 97, 0, 0, 876, 195, 1, 0, 0, 0, 877, 881, 8, 10, 0, 0, 878, 879, 5, 47, 0, 0, 879, 881, 8, 11, 0, 0, 880, 877, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 881, 197, 1, 0, 0, 0, 882, 884, 3, 196, 92, 0, 883, 882, 1, 0, 0, 0, 884, 885, 1, 0, 0, 0, 885, 883, 1, 0, 0, 0, 885, 886, 1, 0, 0, 0, 886, 199, 1, 0, 0, 0, 887, 888, 3, 172, 80, 0, 888, 889, 1, 0, 0, 0, 889, 890, 6, 94, 18, 0, 890, 201, 1, 0, 0, 0, 891, 892, 3, 50, 19, 0, 892, 893, 1, 0, 0, 0, 893, 894, 6, 95, 9, 0, 894, 203, 1, 0, 0, 0, 895, 896, 3, 52, 20, 0, 896, 897, 1, 0, 0, 0, 897, 898, 6, 96, 9, 0, 898, 205, 1, 0, 0, 0, 899, 900, 3, 54, 21, 0, 900, 901, 1, 0, 0, 0, 901, 902, 
6, 97, 9, 0, 902, 207, 1, 0, 0, 0, 903, 904, 3, 66, 27, 0, 904, 905, 1, 0, 0, 0, 905, 906, 6, 98, 12, 0, 906, 907, 6, 98, 13, 0, 907, 209, 1, 0, 0, 0, 908, 909, 3, 108, 48, 0, 909, 910, 1, 0, 0, 0, 910, 911, 6, 99, 19, 0, 911, 211, 1, 0, 0, 0, 912, 913, 3, 104, 46, 0, 913, 914, 1, 0, 0, 0, 914, 915, 6, 100, 15, 0, 915, 213, 1, 0, 0, 0, 916, 921, 3, 70, 29, 0, 917, 921, 3, 68, 28, 0, 918, 921, 3, 84, 36, 0, 919, 921, 3, 158, 73, 0, 920, 916, 1, 0, 0, 0, 920, 917, 1, 0, 0, 0, 920, 918, 1, 0, 0, 0, 920, 919, 1, 0, 0, 0, 921, 215, 1, 0, 0, 0, 922, 925, 3, 70, 29, 0, 923, 925, 3, 158, 73, 0, 924, 922, 1, 0, 0, 0, 924, 923, 1, 0, 0, 0, 925, 929, 1, 0, 0, 0, 926, 928, 3, 214, 101, 0, 927, 926, 1, 0, 0, 0, 928, 931, 1, 0, 0, 0, 929, 927, 1, 0, 0, 0, 929, 930, 1, 0, 0, 0, 930, 942, 1, 0, 0, 0, 931, 929, 1, 0, 0, 0, 932, 935, 3, 84, 36, 0, 933, 935, 3, 78, 33, 0, 934, 932, 1, 0, 0, 0, 934, 933, 1, 0, 0, 0, 935, 937, 1, 0, 0, 0, 936, 938, 3, 214, 101, 0, 937, 936, 1, 0, 0, 0, 938, 939, 1, 0, 0, 0, 939, 937, 1, 0, 0, 0, 939, 940, 1, 0, 0, 0, 940, 942, 1, 0, 0, 0, 941, 924, 1, 0, 0, 0, 941, 934, 1, 0, 0, 0, 942, 217, 1, 0, 0, 0, 943, 946, 3, 216, 102, 0, 944, 946, 3, 170, 79, 0, 945, 943, 1, 0, 0, 0, 945, 944, 1, 0, 0, 0, 946, 947, 1, 0, 0, 0, 947, 945, 1, 0, 0, 0, 947, 948, 1, 0, 0, 0, 948, 219, 1, 0, 0, 0, 949, 950, 3, 50, 19, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 104, 9, 0, 952, 221, 1, 0, 0, 0, 953, 954, 3, 52, 20, 0, 954, 955, 1, 0, 0, 0, 955, 956, 6, 105, 9, 0, 956, 223, 1, 0, 0, 0, 957, 958, 3, 54, 21, 0, 958, 959, 1, 0, 0, 0, 959, 960, 6, 106, 9, 0, 960, 225, 1, 0, 0, 0, 961, 962, 3, 66, 27, 0, 962, 963, 1, 0, 0, 0, 963, 964, 6, 107, 12, 0, 964, 965, 6, 107, 13, 0, 965, 227, 1, 0, 0, 0, 966, 967, 3, 100, 44, 0, 967, 968, 1, 0, 0, 0, 968, 969, 6, 108, 16, 0, 969, 229, 1, 0, 0, 0, 970, 971, 3, 104, 46, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 109, 15, 0, 973, 231, 1, 0, 0, 0, 974, 975, 3, 108, 48, 0, 975, 976, 1, 0, 0, 0, 976, 977, 6, 110, 19, 0, 977, 233, 1, 0, 0, 0, 978, 
979, 5, 97, 0, 0, 979, 980, 5, 115, 0, 0, 980, 235, 1, 0, 0, 0, 981, 982, 3, 218, 103, 0, 982, 983, 1, 0, 0, 0, 983, 984, 6, 112, 20, 0, 984, 237, 1, 0, 0, 0, 985, 986, 3, 50, 19, 0, 986, 987, 1, 0, 0, 0, 987, 988, 6, 113, 9, 0, 988, 239, 1, 0, 0, 0, 989, 990, 3, 52, 20, 0, 990, 991, 1, 0, 0, 0, 991, 992, 6, 114, 9, 0, 992, 241, 1, 0, 0, 0, 993, 994, 3, 54, 21, 0, 994, 995, 1, 0, 0, 0, 995, 996, 6, 115, 9, 0, 996, 243, 1, 0, 0, 0, 997, 998, 3, 66, 27, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 116, 12, 0, 1000, 1001, 6, 116, 13, 0, 1001, 245, 1, 0, 0, 0, 1002, 1003, 3, 164, 76, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 117, 10, 0, 1005, 1006, 6, 117, 21, 0, 1006, 247, 1, 0, 0, 0, 1007, 1008, 5, 111, 0, 0, 1008, 1009, 5, 110, 0, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 118, 22, 0, 1011, 249, 1, 0, 0, 0, 1012, 1013, 5, 119, 0, 0, 1013, 1014, 5, 105, 0, 0, 1014, 1015, 5, 116, 0, 0, 1015, 1016, 5, 104, 0, 0, 1016, 1017, 1, 0, 0, 0, 1017, 1018, 6, 119, 22, 0, 1018, 251, 1, 0, 0, 0, 1019, 1020, 8, 12, 0, 0, 1020, 253, 1, 0, 0, 0, 1021, 1023, 3, 252, 120, 0, 1022, 1021, 1, 0, 0, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1022, 1, 0, 0, 0, 1024, 1025, 1, 0, 0, 0, 1025, 1026, 1, 0, 0, 0, 1026, 1027, 3, 322, 155, 0, 1027, 1029, 1, 0, 0, 0, 1028, 1022, 1, 0, 0, 0, 1028, 1029, 1, 0, 0, 0, 1029, 1031, 1, 0, 0, 0, 1030, 1032, 3, 252, 120, 0, 1031, 1030, 1, 0, 0, 0, 1032, 1033, 1, 0, 0, 0, 1033, 1031, 1, 0, 0, 0, 1033, 1034, 1, 0, 0, 0, 1034, 255, 1, 0, 0, 0, 1035, 1036, 3, 172, 80, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 122, 18, 0, 1038, 257, 1, 0, 0, 0, 1039, 1040, 3, 254, 121, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 123, 23, 0, 1042, 259, 1, 0, 0, 0, 1043, 1044, 3, 50, 19, 0, 1044, 1045, 1, 0, 0, 0, 1045, 1046, 6, 124, 9, 0, 1046, 261, 1, 0, 0, 0, 1047, 1048, 3, 52, 20, 0, 1048, 1049, 1, 0, 0, 0, 1049, 1050, 6, 125, 9, 0, 1050, 263, 1, 0, 0, 0, 1051, 1052, 3, 54, 21, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1054, 6, 126, 9, 0, 1054, 265, 1, 0, 0, 0, 1055, 1056, 3, 66, 27, 0, 1056, 1057, 1, 0, 
0, 0, 1057, 1058, 6, 127, 12, 0, 1058, 1059, 6, 127, 13, 0, 1059, 1060, 6, 127, 13, 0, 1060, 267, 1, 0, 0, 0, 1061, 1062, 3, 100, 44, 0, 1062, 1063, 1, 0, 0, 0, 1063, 1064, 6, 128, 16, 0, 1064, 269, 1, 0, 0, 0, 1065, 1066, 3, 104, 46, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1068, 6, 129, 15, 0, 1068, 271, 1, 0, 0, 0, 1069, 1070, 3, 108, 48, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1072, 6, 130, 19, 0, 1072, 273, 1, 0, 0, 0, 1073, 1074, 3, 250, 119, 0, 1074, 1075, 1, 0, 0, 0, 1075, 1076, 6, 131, 24, 0, 1076, 275, 1, 0, 0, 0, 1077, 1078, 3, 218, 103, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 132, 20, 0, 1080, 277, 1, 0, 0, 0, 1081, 1082, 3, 172, 80, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 133, 18, 0, 1084, 279, 1, 0, 0, 0, 1085, 1086, 3, 50, 19, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 134, 9, 0, 1088, 281, 1, 0, 0, 0, 1089, 1090, 3, 52, 20, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 135, 9, 0, 1092, 283, 1, 0, 0, 0, 1093, 1094, 3, 54, 21, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 136, 9, 0, 1096, 285, 1, 0, 0, 0, 1097, 1098, 3, 66, 27, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 137, 12, 0, 1100, 1101, 6, 137, 13, 0, 1101, 287, 1, 0, 0, 0, 1102, 1103, 3, 108, 48, 0, 1103, 1104, 1, 0, 0, 0, 1104, 1105, 6, 138, 19, 0, 1105, 289, 1, 0, 0, 0, 1106, 1107, 3, 172, 80, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 139, 18, 0, 1109, 291, 1, 0, 0, 0, 1110, 1111, 3, 168, 78, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 140, 25, 0, 1113, 293, 1, 0, 0, 0, 1114, 1115, 3, 50, 19, 0, 1115, 1116, 1, 0, 0, 0, 1116, 1117, 6, 141, 9, 0, 1117, 295, 1, 0, 0, 0, 1118, 1119, 3, 52, 20, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1121, 6, 142, 9, 0, 1121, 297, 1, 0, 0, 0, 1122, 1123, 3, 54, 21, 0, 1123, 1124, 1, 0, 0, 0, 1124, 1125, 6, 143, 9, 0, 1125, 299, 1, 0, 0, 0, 1126, 1127, 3, 66, 27, 0, 1127, 1128, 1, 0, 0, 0, 1128, 1129, 6, 144, 12, 0, 1129, 1130, 6, 144, 13, 0, 1130, 301, 1, 0, 0, 0, 1131, 1132, 5, 105, 0, 0, 1132, 1133, 5, 110, 0, 0, 1133, 1134, 5, 102, 0, 0, 1134, 1135, 5, 111, 0, 0, 1135, 303, 1, 0, 0, 0, 
1136, 1137, 3, 50, 19, 0, 1137, 1138, 1, 0, 0, 0, 1138, 1139, 6, 146, 9, 0, 1139, 305, 1, 0, 0, 0, 1140, 1141, 3, 52, 20, 0, 1141, 1142, 1, 0, 0, 0, 1142, 1143, 6, 147, 9, 0, 1143, 307, 1, 0, 0, 0, 1144, 1145, 3, 54, 21, 0, 1145, 1146, 1, 0, 0, 0, 1146, 1147, 6, 148, 9, 0, 1147, 309, 1, 0, 0, 0, 1148, 1149, 3, 66, 27, 0, 1149, 1150, 1, 0, 0, 0, 1150, 1151, 6, 149, 12, 0, 1151, 1152, 6, 149, 13, 0, 1152, 311, 1, 0, 0, 0, 1153, 1154, 5, 102, 0, 0, 1154, 1155, 5, 117, 0, 0, 1155, 1156, 5, 110, 0, 0, 1156, 1157, 5, 99, 0, 0, 1157, 1158, 5, 116, 0, 0, 1158, 1159, 5, 105, 0, 0, 1159, 1160, 5, 111, 0, 0, 1160, 1161, 5, 110, 0, 0, 1161, 1162, 5, 115, 0, 0, 1162, 313, 1, 0, 0, 0, 1163, 1164, 3, 50, 19, 0, 1164, 1165, 1, 0, 0, 0, 1165, 1166, 6, 151, 9, 0, 1166, 315, 1, 0, 0, 0, 1167, 1168, 3, 52, 20, 0, 1168, 1169, 1, 0, 0, 0, 1169, 1170, 6, 152, 9, 0, 1170, 317, 1, 0, 0, 0, 1171, 1172, 3, 54, 21, 0, 1172, 1173, 1, 0, 0, 0, 1173, 1174, 6, 153, 9, 0, 1174, 319, 1, 0, 0, 0, 1175, 1176, 3, 166, 77, 0, 1176, 1177, 1, 0, 0, 0, 1177, 1178, 6, 154, 14, 0, 1178, 1179, 6, 154, 13, 0, 1179, 321, 1, 0, 0, 0, 1180, 1181, 5, 58, 0, 0, 1181, 323, 1, 0, 0, 0, 1182, 1188, 3, 78, 33, 0, 1183, 1188, 3, 68, 28, 0, 1184, 1188, 3, 108, 48, 0, 1185, 1188, 3, 70, 29, 0, 1186, 1188, 3, 84, 36, 0, 1187, 1182, 1, 0, 0, 0, 1187, 1183, 1, 0, 0, 0, 1187, 1184, 1, 0, 0, 0, 1187, 1185, 1, 0, 0, 0, 1187, 1186, 1, 0, 0, 0, 1188, 1189, 1, 0, 0, 0, 1189, 1187, 1, 0, 0, 0, 1189, 1190, 1, 0, 0, 0, 1190, 325, 1, 0, 0, 0, 1191, 1192, 3, 50, 19, 0, 1192, 1193, 1, 0, 0, 0, 1193, 1194, 6, 157, 9, 0, 1194, 327, 1, 0, 0, 0, 1195, 1196, 3, 52, 20, 0, 1196, 1197, 1, 0, 0, 0, 1197, 1198, 6, 158, 9, 0, 1198, 329, 1, 0, 0, 0, 1199, 1200, 3, 54, 21, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 159, 9, 0, 1202, 331, 1, 0, 0, 0, 58, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 485, 495, 499, 502, 511, 513, 524, 565, 570, 579, 586, 591, 593, 604, 612, 615, 617, 622, 627, 633, 640, 645, 651, 654, 662, 666, 799, 804, 809, 811, 817, 880, 
885, 920, 924, 929, 934, 939, 941, 945, 947, 1024, 1028, 1033, 1187, 1189, 26, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 65, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 66, 0, 7, 35, 0, 7, 33, 0, 7, 27, 0, 7, 68, 0, 7, 37, 0, 7, 78, 0, 5, 11, 0, 5, 7, 0, 7, 88, 0, 7, 87, 0, 7, 67, 0] \ No newline at end of file +[4, 0, 110, 1197, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 
2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 482, 8, 18, 11, 18, 12, 18, 483, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 492, 8, 19, 10, 19, 12, 19, 495, 9, 19, 1, 19, 3, 19, 498, 8, 19, 1, 19, 3, 19, 501, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 510, 8, 
20, 10, 20, 12, 20, 513, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 521, 8, 21, 11, 21, 12, 21, 522, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 564, 8, 32, 1, 32, 4, 32, 567, 8, 32, 11, 32, 12, 32, 568, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 578, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 585, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 590, 8, 38, 10, 38, 12, 38, 593, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 601, 8, 38, 10, 38, 12, 38, 604, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 611, 8, 38, 1, 38, 3, 38, 614, 8, 38, 3, 38, 616, 8, 38, 1, 39, 4, 39, 619, 8, 39, 11, 39, 12, 39, 620, 1, 40, 4, 40, 624, 8, 40, 11, 40, 12, 40, 625, 1, 40, 1, 40, 5, 40, 630, 8, 40, 10, 40, 12, 40, 633, 9, 40, 1, 40, 1, 40, 4, 40, 637, 8, 40, 11, 40, 12, 40, 638, 1, 40, 4, 40, 642, 8, 40, 11, 40, 12, 40, 643, 1, 40, 1, 40, 5, 40, 648, 8, 40, 10, 40, 12, 40, 651, 9, 40, 3, 40, 653, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 659, 8, 40, 11, 40, 12, 40, 660, 1, 40, 1, 40, 3, 40, 665, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 
70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 5, 78, 796, 8, 78, 10, 78, 12, 78, 799, 9, 78, 1, 78, 1, 78, 3, 78, 803, 8, 78, 1, 78, 4, 78, 806, 8, 78, 11, 78, 12, 78, 807, 3, 78, 810, 8, 78, 1, 79, 1, 79, 4, 79, 814, 8, 79, 11, 79, 12, 79, 815, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 3, 92, 879, 8, 92, 1, 93, 4, 93, 882, 8, 93, 11, 93, 12, 93, 883, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 3, 100, 915, 8, 100, 1, 101, 1, 101, 3, 101, 919, 8, 101, 1, 101, 5, 101, 922, 8, 101, 10, 101, 12, 101, 925, 9, 101, 1, 101, 1, 101, 3, 101, 929, 8, 101, 1, 101, 4, 101, 932, 8, 101, 11, 101, 12, 101, 933, 3, 101, 936, 8, 101, 1, 102, 1, 102, 4, 102, 940, 8, 102, 11, 102, 12, 102, 941, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 120, 4, 120, 1017, 8, 120, 11, 120, 12, 120, 1018, 1, 120, 
1, 120, 3, 120, 1023, 8, 120, 1, 120, 4, 120, 1026, 8, 120, 11, 120, 12, 120, 1027, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 4, 155, 1182, 8, 155, 11, 155, 12, 155, 1183, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 2, 511, 602, 0, 159, 12, 1, 14, 2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 15, 42, 16, 44, 17, 46, 18, 48, 19, 50, 20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 
55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 0, 172, 68, 174, 69, 176, 70, 178, 71, 180, 0, 182, 0, 184, 0, 186, 0, 188, 0, 190, 0, 192, 72, 194, 73, 196, 0, 198, 74, 200, 75, 202, 76, 204, 77, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 78, 218, 79, 220, 80, 222, 81, 224, 0, 226, 0, 228, 0, 230, 0, 232, 82, 234, 0, 236, 83, 238, 84, 240, 85, 242, 0, 244, 0, 246, 86, 248, 87, 250, 0, 252, 88, 254, 0, 256, 0, 258, 89, 260, 90, 262, 91, 264, 0, 266, 0, 268, 0, 270, 0, 272, 0, 274, 0, 276, 0, 278, 92, 280, 93, 282, 94, 284, 0, 286, 0, 288, 0, 290, 0, 292, 95, 294, 96, 296, 97, 298, 0, 300, 98, 302, 99, 304, 100, 306, 101, 308, 0, 310, 102, 312, 103, 314, 104, 316, 105, 318, 0, 320, 106, 322, 107, 324, 108, 326, 109, 328, 110, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1224, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 
0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 4, 206, 1, 0, 0, 0, 4, 208, 1, 0, 0, 0, 4, 210, 1, 0, 0, 0, 4, 216, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 5, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 6, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 7, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 8, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 9, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 10, 308, 1, 0, 
0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 11, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 12, 330, 1, 0, 0, 0, 14, 340, 1, 0, 0, 0, 16, 347, 1, 0, 0, 0, 18, 356, 1, 0, 0, 0, 20, 363, 1, 0, 0, 0, 22, 373, 1, 0, 0, 0, 24, 380, 1, 0, 0, 0, 26, 387, 1, 0, 0, 0, 28, 401, 1, 0, 0, 0, 30, 408, 1, 0, 0, 0, 32, 416, 1, 0, 0, 0, 34, 423, 1, 0, 0, 0, 36, 435, 1, 0, 0, 0, 38, 444, 1, 0, 0, 0, 40, 450, 1, 0, 0, 0, 42, 457, 1, 0, 0, 0, 44, 464, 1, 0, 0, 0, 46, 472, 1, 0, 0, 0, 48, 481, 1, 0, 0, 0, 50, 487, 1, 0, 0, 0, 52, 504, 1, 0, 0, 0, 54, 520, 1, 0, 0, 0, 56, 526, 1, 0, 0, 0, 58, 531, 1, 0, 0, 0, 60, 536, 1, 0, 0, 0, 62, 540, 1, 0, 0, 0, 64, 544, 1, 0, 0, 0, 66, 548, 1, 0, 0, 0, 68, 552, 1, 0, 0, 0, 70, 554, 1, 0, 0, 0, 72, 556, 1, 0, 0, 0, 74, 559, 1, 0, 0, 0, 76, 561, 1, 0, 0, 0, 78, 570, 1, 0, 0, 0, 80, 572, 1, 0, 0, 0, 82, 577, 1, 0, 0, 0, 84, 579, 1, 0, 0, 0, 86, 584, 1, 0, 0, 0, 88, 615, 1, 0, 0, 0, 90, 618, 1, 0, 0, 0, 92, 664, 1, 0, 0, 0, 94, 666, 1, 0, 0, 0, 96, 669, 1, 0, 0, 0, 98, 673, 1, 0, 0, 0, 100, 677, 1, 0, 0, 0, 102, 679, 1, 0, 0, 0, 104, 682, 1, 0, 0, 0, 106, 684, 1, 0, 0, 0, 108, 689, 1, 0, 0, 0, 110, 691, 1, 0, 0, 0, 112, 697, 1, 0, 0, 0, 114, 703, 1, 0, 0, 0, 116, 708, 1, 0, 0, 0, 118, 710, 1, 0, 0, 0, 120, 713, 1, 0, 0, 0, 122, 716, 1, 0, 0, 0, 124, 721, 1, 0, 0, 0, 126, 725, 1, 0, 0, 0, 128, 730, 1, 0, 0, 0, 130, 736, 1, 0, 0, 0, 132, 739, 1, 0, 0, 0, 134, 741, 1, 0, 0, 0, 136, 747, 1, 0, 0, 0, 138, 749, 1, 0, 0, 0, 140, 754, 1, 0, 0, 0, 142, 757, 1, 0, 0, 0, 144, 760, 1, 0, 0, 0, 146, 763, 1, 0, 0, 0, 148, 765, 1, 0, 0, 0, 150, 768, 1, 0, 0, 0, 152, 770, 1, 0, 0, 0, 154, 773, 1, 0, 0, 0, 156, 775, 1, 0, 0, 0, 158, 777, 1, 0, 0, 0, 160, 779, 1, 0, 0, 0, 162, 781, 1, 0, 0, 0, 164, 783, 1, 0, 0, 0, 166, 788, 1, 0, 0, 0, 168, 809, 1, 0, 0, 0, 170, 811, 1, 0, 0, 0, 172, 819, 1, 0, 0, 0, 174, 821, 1, 0, 0, 0, 176, 825, 1, 0, 0, 0, 
178, 829, 1, 0, 0, 0, 180, 833, 1, 0, 0, 0, 182, 838, 1, 0, 0, 0, 184, 842, 1, 0, 0, 0, 186, 846, 1, 0, 0, 0, 188, 850, 1, 0, 0, 0, 190, 854, 1, 0, 0, 0, 192, 858, 1, 0, 0, 0, 194, 866, 1, 0, 0, 0, 196, 878, 1, 0, 0, 0, 198, 881, 1, 0, 0, 0, 200, 885, 1, 0, 0, 0, 202, 889, 1, 0, 0, 0, 204, 893, 1, 0, 0, 0, 206, 897, 1, 0, 0, 0, 208, 902, 1, 0, 0, 0, 210, 906, 1, 0, 0, 0, 212, 914, 1, 0, 0, 0, 214, 935, 1, 0, 0, 0, 216, 939, 1, 0, 0, 0, 218, 943, 1, 0, 0, 0, 220, 947, 1, 0, 0, 0, 222, 951, 1, 0, 0, 0, 224, 955, 1, 0, 0, 0, 226, 960, 1, 0, 0, 0, 228, 964, 1, 0, 0, 0, 230, 968, 1, 0, 0, 0, 232, 972, 1, 0, 0, 0, 234, 975, 1, 0, 0, 0, 236, 979, 1, 0, 0, 0, 238, 983, 1, 0, 0, 0, 240, 987, 1, 0, 0, 0, 242, 991, 1, 0, 0, 0, 244, 996, 1, 0, 0, 0, 246, 1001, 1, 0, 0, 0, 248, 1006, 1, 0, 0, 0, 250, 1013, 1, 0, 0, 0, 252, 1022, 1, 0, 0, 0, 254, 1029, 1, 0, 0, 0, 256, 1033, 1, 0, 0, 0, 258, 1037, 1, 0, 0, 0, 260, 1041, 1, 0, 0, 0, 262, 1045, 1, 0, 0, 0, 264, 1049, 1, 0, 0, 0, 266, 1055, 1, 0, 0, 0, 268, 1059, 1, 0, 0, 0, 270, 1063, 1, 0, 0, 0, 272, 1067, 1, 0, 0, 0, 274, 1071, 1, 0, 0, 0, 276, 1075, 1, 0, 0, 0, 278, 1079, 1, 0, 0, 0, 280, 1083, 1, 0, 0, 0, 282, 1087, 1, 0, 0, 0, 284, 1091, 1, 0, 0, 0, 286, 1096, 1, 0, 0, 0, 288, 1100, 1, 0, 0, 0, 290, 1104, 1, 0, 0, 0, 292, 1108, 1, 0, 0, 0, 294, 1112, 1, 0, 0, 0, 296, 1116, 1, 0, 0, 0, 298, 1120, 1, 0, 0, 0, 300, 1125, 1, 0, 0, 0, 302, 1130, 1, 0, 0, 0, 304, 1134, 1, 0, 0, 0, 306, 1138, 1, 0, 0, 0, 308, 1142, 1, 0, 0, 0, 310, 1147, 1, 0, 0, 0, 312, 1157, 1, 0, 0, 0, 314, 1161, 1, 0, 0, 0, 316, 1165, 1, 0, 0, 0, 318, 1169, 1, 0, 0, 0, 320, 1174, 1, 0, 0, 0, 322, 1181, 1, 0, 0, 0, 324, 1185, 1, 0, 0, 0, 326, 1189, 1, 0, 0, 0, 328, 1193, 1, 0, 0, 0, 330, 331, 5, 100, 0, 0, 331, 332, 5, 105, 0, 0, 332, 333, 5, 115, 0, 0, 333, 334, 5, 115, 0, 0, 334, 335, 5, 101, 0, 0, 335, 336, 5, 99, 0, 0, 336, 337, 5, 116, 0, 0, 337, 338, 1, 0, 0, 0, 338, 339, 6, 0, 0, 0, 339, 13, 1, 0, 0, 0, 340, 341, 5, 100, 0, 0, 341, 342, 5, 114, 0, 0, 342, 
343, 5, 111, 0, 0, 343, 344, 5, 112, 0, 0, 344, 345, 1, 0, 0, 0, 345, 346, 6, 1, 1, 0, 346, 15, 1, 0, 0, 0, 347, 348, 5, 101, 0, 0, 348, 349, 5, 110, 0, 0, 349, 350, 5, 114, 0, 0, 350, 351, 5, 105, 0, 0, 351, 352, 5, 99, 0, 0, 352, 353, 5, 104, 0, 0, 353, 354, 1, 0, 0, 0, 354, 355, 6, 2, 2, 0, 355, 17, 1, 0, 0, 0, 356, 357, 5, 101, 0, 0, 357, 358, 5, 118, 0, 0, 358, 359, 5, 97, 0, 0, 359, 360, 5, 108, 0, 0, 360, 361, 1, 0, 0, 0, 361, 362, 6, 3, 0, 0, 362, 19, 1, 0, 0, 0, 363, 364, 5, 101, 0, 0, 364, 365, 5, 120, 0, 0, 365, 366, 5, 112, 0, 0, 366, 367, 5, 108, 0, 0, 367, 368, 5, 97, 0, 0, 368, 369, 5, 105, 0, 0, 369, 370, 5, 110, 0, 0, 370, 371, 1, 0, 0, 0, 371, 372, 6, 4, 3, 0, 372, 21, 1, 0, 0, 0, 373, 374, 5, 102, 0, 0, 374, 375, 5, 114, 0, 0, 375, 376, 5, 111, 0, 0, 376, 377, 5, 109, 0, 0, 377, 378, 1, 0, 0, 0, 378, 379, 6, 5, 4, 0, 379, 23, 1, 0, 0, 0, 380, 381, 5, 103, 0, 0, 381, 382, 5, 114, 0, 0, 382, 383, 5, 111, 0, 0, 383, 384, 5, 107, 0, 0, 384, 385, 1, 0, 0, 0, 385, 386, 6, 6, 0, 0, 386, 25, 1, 0, 0, 0, 387, 388, 5, 105, 0, 0, 388, 389, 5, 110, 0, 0, 389, 390, 5, 108, 0, 0, 390, 391, 5, 105, 0, 0, 391, 392, 5, 110, 0, 0, 392, 393, 5, 101, 0, 0, 393, 394, 5, 115, 0, 0, 394, 395, 5, 116, 0, 0, 395, 396, 5, 97, 0, 0, 396, 397, 5, 116, 0, 0, 397, 398, 5, 115, 0, 0, 398, 399, 1, 0, 0, 0, 399, 400, 6, 7, 0, 0, 400, 27, 1, 0, 0, 0, 401, 402, 5, 107, 0, 0, 402, 403, 5, 101, 0, 0, 403, 404, 5, 101, 0, 0, 404, 405, 5, 112, 0, 0, 405, 406, 1, 0, 0, 0, 406, 407, 6, 8, 1, 0, 407, 29, 1, 0, 0, 0, 408, 409, 5, 108, 0, 0, 409, 410, 5, 105, 0, 0, 410, 411, 5, 109, 0, 0, 411, 412, 5, 105, 0, 0, 412, 413, 5, 116, 0, 0, 413, 414, 1, 0, 0, 0, 414, 415, 6, 9, 0, 0, 415, 31, 1, 0, 0, 0, 416, 417, 5, 109, 0, 0, 417, 418, 5, 101, 0, 0, 418, 419, 5, 116, 0, 0, 419, 420, 5, 97, 0, 0, 420, 421, 1, 0, 0, 0, 421, 422, 6, 10, 5, 0, 422, 33, 1, 0, 0, 0, 423, 424, 5, 109, 0, 0, 424, 425, 5, 118, 0, 0, 425, 426, 5, 95, 0, 0, 426, 427, 5, 101, 0, 0, 427, 428, 5, 120, 0, 0, 428, 429, 5, 
112, 0, 0, 429, 430, 5, 97, 0, 0, 430, 431, 5, 110, 0, 0, 431, 432, 5, 100, 0, 0, 432, 433, 1, 0, 0, 0, 433, 434, 6, 11, 6, 0, 434, 35, 1, 0, 0, 0, 435, 436, 5, 114, 0, 0, 436, 437, 5, 101, 0, 0, 437, 438, 5, 110, 0, 0, 438, 439, 5, 97, 0, 0, 439, 440, 5, 109, 0, 0, 440, 441, 5, 101, 0, 0, 441, 442, 1, 0, 0, 0, 442, 443, 6, 12, 7, 0, 443, 37, 1, 0, 0, 0, 444, 445, 5, 114, 0, 0, 445, 446, 5, 111, 0, 0, 446, 447, 5, 119, 0, 0, 447, 448, 1, 0, 0, 0, 448, 449, 6, 13, 0, 0, 449, 39, 1, 0, 0, 0, 450, 451, 5, 115, 0, 0, 451, 452, 5, 104, 0, 0, 452, 453, 5, 111, 0, 0, 453, 454, 5, 119, 0, 0, 454, 455, 1, 0, 0, 0, 455, 456, 6, 14, 8, 0, 456, 41, 1, 0, 0, 0, 457, 458, 5, 115, 0, 0, 458, 459, 5, 111, 0, 0, 459, 460, 5, 114, 0, 0, 460, 461, 5, 116, 0, 0, 461, 462, 1, 0, 0, 0, 462, 463, 6, 15, 0, 0, 463, 43, 1, 0, 0, 0, 464, 465, 5, 115, 0, 0, 465, 466, 5, 116, 0, 0, 466, 467, 5, 97, 0, 0, 467, 468, 5, 116, 0, 0, 468, 469, 5, 115, 0, 0, 469, 470, 1, 0, 0, 0, 470, 471, 6, 16, 0, 0, 471, 45, 1, 0, 0, 0, 472, 473, 5, 119, 0, 0, 473, 474, 5, 104, 0, 0, 474, 475, 5, 101, 0, 0, 475, 476, 5, 114, 0, 0, 476, 477, 5, 101, 0, 0, 477, 478, 1, 0, 0, 0, 478, 479, 6, 17, 0, 0, 479, 47, 1, 0, 0, 0, 480, 482, 8, 0, 0, 0, 481, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 486, 6, 18, 0, 0, 486, 49, 1, 0, 0, 0, 487, 488, 5, 47, 0, 0, 488, 489, 5, 47, 0, 0, 489, 493, 1, 0, 0, 0, 490, 492, 8, 1, 0, 0, 491, 490, 1, 0, 0, 0, 492, 495, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 497, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 496, 498, 5, 13, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 500, 1, 0, 0, 0, 499, 501, 5, 10, 0, 0, 500, 499, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 19, 9, 0, 503, 51, 1, 0, 0, 0, 504, 505, 5, 47, 0, 0, 505, 506, 5, 42, 0, 0, 506, 511, 1, 0, 0, 0, 507, 510, 3, 52, 20, 0, 508, 510, 9, 0, 0, 0, 509, 507, 1, 0, 0, 0, 509, 508, 1, 0, 0, 0, 510, 513, 1, 0, 0, 0, 511, 512, 
1, 0, 0, 0, 511, 509, 1, 0, 0, 0, 512, 514, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 514, 515, 5, 42, 0, 0, 515, 516, 5, 47, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 6, 20, 9, 0, 518, 53, 1, 0, 0, 0, 519, 521, 7, 2, 0, 0, 520, 519, 1, 0, 0, 0, 521, 522, 1, 0, 0, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 21, 9, 0, 525, 55, 1, 0, 0, 0, 526, 527, 3, 164, 76, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 22, 10, 0, 529, 530, 6, 22, 11, 0, 530, 57, 1, 0, 0, 0, 531, 532, 3, 66, 27, 0, 532, 533, 1, 0, 0, 0, 533, 534, 6, 23, 12, 0, 534, 535, 6, 23, 13, 0, 535, 59, 1, 0, 0, 0, 536, 537, 3, 54, 21, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 24, 9, 0, 539, 61, 1, 0, 0, 0, 540, 541, 3, 50, 19, 0, 541, 542, 1, 0, 0, 0, 542, 543, 6, 25, 9, 0, 543, 63, 1, 0, 0, 0, 544, 545, 3, 52, 20, 0, 545, 546, 1, 0, 0, 0, 546, 547, 6, 26, 9, 0, 547, 65, 1, 0, 0, 0, 548, 549, 5, 124, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 6, 27, 13, 0, 551, 67, 1, 0, 0, 0, 552, 553, 7, 3, 0, 0, 553, 69, 1, 0, 0, 0, 554, 555, 7, 4, 0, 0, 555, 71, 1, 0, 0, 0, 556, 557, 5, 92, 0, 0, 557, 558, 7, 5, 0, 0, 558, 73, 1, 0, 0, 0, 559, 560, 8, 6, 0, 0, 560, 75, 1, 0, 0, 0, 561, 563, 7, 7, 0, 0, 562, 564, 7, 8, 0, 0, 563, 562, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 566, 1, 0, 0, 0, 565, 567, 3, 68, 28, 0, 566, 565, 1, 0, 0, 0, 567, 568, 1, 0, 0, 0, 568, 566, 1, 0, 0, 0, 568, 569, 1, 0, 0, 0, 569, 77, 1, 0, 0, 0, 570, 571, 5, 64, 0, 0, 571, 79, 1, 0, 0, 0, 572, 573, 5, 96, 0, 0, 573, 81, 1, 0, 0, 0, 574, 578, 8, 9, 0, 0, 575, 576, 5, 96, 0, 0, 576, 578, 5, 96, 0, 0, 577, 574, 1, 0, 0, 0, 577, 575, 1, 0, 0, 0, 578, 83, 1, 0, 0, 0, 579, 580, 5, 95, 0, 0, 580, 85, 1, 0, 0, 0, 581, 585, 3, 70, 29, 0, 582, 585, 3, 68, 28, 0, 583, 585, 3, 84, 36, 0, 584, 581, 1, 0, 0, 0, 584, 582, 1, 0, 0, 0, 584, 583, 1, 0, 0, 0, 585, 87, 1, 0, 0, 0, 586, 591, 5, 34, 0, 0, 587, 590, 3, 72, 30, 0, 588, 590, 3, 74, 31, 0, 589, 587, 1, 0, 0, 0, 589, 588, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 591, 592, 1, 0, 0, 
0, 592, 594, 1, 0, 0, 0, 593, 591, 1, 0, 0, 0, 594, 616, 5, 34, 0, 0, 595, 596, 5, 34, 0, 0, 596, 597, 5, 34, 0, 0, 597, 598, 5, 34, 0, 0, 598, 602, 1, 0, 0, 0, 599, 601, 8, 1, 0, 0, 600, 599, 1, 0, 0, 0, 601, 604, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 602, 600, 1, 0, 0, 0, 603, 605, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 606, 5, 34, 0, 0, 606, 607, 5, 34, 0, 0, 607, 608, 5, 34, 0, 0, 608, 610, 1, 0, 0, 0, 609, 611, 5, 34, 0, 0, 610, 609, 1, 0, 0, 0, 610, 611, 1, 0, 0, 0, 611, 613, 1, 0, 0, 0, 612, 614, 5, 34, 0, 0, 613, 612, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 616, 1, 0, 0, 0, 615, 586, 1, 0, 0, 0, 615, 595, 1, 0, 0, 0, 616, 89, 1, 0, 0, 0, 617, 619, 3, 68, 28, 0, 618, 617, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 91, 1, 0, 0, 0, 622, 624, 3, 68, 28, 0, 623, 622, 1, 0, 0, 0, 624, 625, 1, 0, 0, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 631, 3, 108, 48, 0, 628, 630, 3, 68, 28, 0, 629, 628, 1, 0, 0, 0, 630, 633, 1, 0, 0, 0, 631, 629, 1, 0, 0, 0, 631, 632, 1, 0, 0, 0, 632, 665, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 634, 636, 3, 108, 48, 0, 635, 637, 3, 68, 28, 0, 636, 635, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 636, 1, 0, 0, 0, 638, 639, 1, 0, 0, 0, 639, 665, 1, 0, 0, 0, 640, 642, 3, 68, 28, 0, 641, 640, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 641, 1, 0, 0, 0, 643, 644, 1, 0, 0, 0, 644, 652, 1, 0, 0, 0, 645, 649, 3, 108, 48, 0, 646, 648, 3, 68, 28, 0, 647, 646, 1, 0, 0, 0, 648, 651, 1, 0, 0, 0, 649, 647, 1, 0, 0, 0, 649, 650, 1, 0, 0, 0, 650, 653, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 652, 645, 1, 0, 0, 0, 652, 653, 1, 0, 0, 0, 653, 654, 1, 0, 0, 0, 654, 655, 3, 76, 32, 0, 655, 665, 1, 0, 0, 0, 656, 658, 3, 108, 48, 0, 657, 659, 3, 68, 28, 0, 658, 657, 1, 0, 0, 0, 659, 660, 1, 0, 0, 0, 660, 658, 1, 0, 0, 0, 660, 661, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 663, 3, 76, 32, 0, 663, 665, 1, 0, 0, 0, 664, 623, 1, 0, 0, 0, 664, 634, 1, 0, 0, 0, 664, 641, 1, 0, 0, 0, 664, 656, 1, 0, 0, 0, 665, 93, 1, 0, 0, 0, 
666, 667, 5, 98, 0, 0, 667, 668, 5, 121, 0, 0, 668, 95, 1, 0, 0, 0, 669, 670, 5, 97, 0, 0, 670, 671, 5, 110, 0, 0, 671, 672, 5, 100, 0, 0, 672, 97, 1, 0, 0, 0, 673, 674, 5, 97, 0, 0, 674, 675, 5, 115, 0, 0, 675, 676, 5, 99, 0, 0, 676, 99, 1, 0, 0, 0, 677, 678, 5, 61, 0, 0, 678, 101, 1, 0, 0, 0, 679, 680, 5, 58, 0, 0, 680, 681, 5, 58, 0, 0, 681, 103, 1, 0, 0, 0, 682, 683, 5, 44, 0, 0, 683, 105, 1, 0, 0, 0, 684, 685, 5, 100, 0, 0, 685, 686, 5, 101, 0, 0, 686, 687, 5, 115, 0, 0, 687, 688, 5, 99, 0, 0, 688, 107, 1, 0, 0, 0, 689, 690, 5, 46, 0, 0, 690, 109, 1, 0, 0, 0, 691, 692, 5, 102, 0, 0, 692, 693, 5, 97, 0, 0, 693, 694, 5, 108, 0, 0, 694, 695, 5, 115, 0, 0, 695, 696, 5, 101, 0, 0, 696, 111, 1, 0, 0, 0, 697, 698, 5, 102, 0, 0, 698, 699, 5, 105, 0, 0, 699, 700, 5, 114, 0, 0, 700, 701, 5, 115, 0, 0, 701, 702, 5, 116, 0, 0, 702, 113, 1, 0, 0, 0, 703, 704, 5, 108, 0, 0, 704, 705, 5, 97, 0, 0, 705, 706, 5, 115, 0, 0, 706, 707, 5, 116, 0, 0, 707, 115, 1, 0, 0, 0, 708, 709, 5, 40, 0, 0, 709, 117, 1, 0, 0, 0, 710, 711, 5, 105, 0, 0, 711, 712, 5, 110, 0, 0, 712, 119, 1, 0, 0, 0, 713, 714, 5, 105, 0, 0, 714, 715, 5, 115, 0, 0, 715, 121, 1, 0, 0, 0, 716, 717, 5, 108, 0, 0, 717, 718, 5, 105, 0, 0, 718, 719, 5, 107, 0, 0, 719, 720, 5, 101, 0, 0, 720, 123, 1, 0, 0, 0, 721, 722, 5, 110, 0, 0, 722, 723, 5, 111, 0, 0, 723, 724, 5, 116, 0, 0, 724, 125, 1, 0, 0, 0, 725, 726, 5, 110, 0, 0, 726, 727, 5, 117, 0, 0, 727, 728, 5, 108, 0, 0, 728, 729, 5, 108, 0, 0, 729, 127, 1, 0, 0, 0, 730, 731, 5, 110, 0, 0, 731, 732, 5, 117, 0, 0, 732, 733, 5, 108, 0, 0, 733, 734, 5, 108, 0, 0, 734, 735, 5, 115, 0, 0, 735, 129, 1, 0, 0, 0, 736, 737, 5, 111, 0, 0, 737, 738, 5, 114, 0, 0, 738, 131, 1, 0, 0, 0, 739, 740, 5, 63, 0, 0, 740, 133, 1, 0, 0, 0, 741, 742, 5, 114, 0, 0, 742, 743, 5, 108, 0, 0, 743, 744, 5, 105, 0, 0, 744, 745, 5, 107, 0, 0, 745, 746, 5, 101, 0, 0, 746, 135, 1, 0, 0, 0, 747, 748, 5, 41, 0, 0, 748, 137, 1, 0, 0, 0, 749, 750, 5, 116, 0, 0, 750, 751, 5, 114, 0, 0, 751, 752, 5, 117, 0, 
0, 752, 753, 5, 101, 0, 0, 753, 139, 1, 0, 0, 0, 754, 755, 5, 61, 0, 0, 755, 756, 5, 61, 0, 0, 756, 141, 1, 0, 0, 0, 757, 758, 5, 61, 0, 0, 758, 759, 5, 126, 0, 0, 759, 143, 1, 0, 0, 0, 760, 761, 5, 33, 0, 0, 761, 762, 5, 61, 0, 0, 762, 145, 1, 0, 0, 0, 763, 764, 5, 60, 0, 0, 764, 147, 1, 0, 0, 0, 765, 766, 5, 60, 0, 0, 766, 767, 5, 61, 0, 0, 767, 149, 1, 0, 0, 0, 768, 769, 5, 62, 0, 0, 769, 151, 1, 0, 0, 0, 770, 771, 5, 62, 0, 0, 771, 772, 5, 61, 0, 0, 772, 153, 1, 0, 0, 0, 773, 774, 5, 43, 0, 0, 774, 155, 1, 0, 0, 0, 775, 776, 5, 45, 0, 0, 776, 157, 1, 0, 0, 0, 777, 778, 5, 42, 0, 0, 778, 159, 1, 0, 0, 0, 779, 780, 5, 47, 0, 0, 780, 161, 1, 0, 0, 0, 781, 782, 5, 37, 0, 0, 782, 163, 1, 0, 0, 0, 783, 784, 5, 91, 0, 0, 784, 785, 1, 0, 0, 0, 785, 786, 6, 76, 0, 0, 786, 787, 6, 76, 0, 0, 787, 165, 1, 0, 0, 0, 788, 789, 5, 93, 0, 0, 789, 790, 1, 0, 0, 0, 790, 791, 6, 77, 13, 0, 791, 792, 6, 77, 13, 0, 792, 167, 1, 0, 0, 0, 793, 797, 3, 70, 29, 0, 794, 796, 3, 86, 37, 0, 795, 794, 1, 0, 0, 0, 796, 799, 1, 0, 0, 0, 797, 795, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 810, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 800, 803, 3, 84, 36, 0, 801, 803, 3, 78, 33, 0, 802, 800, 1, 0, 0, 0, 802, 801, 1, 0, 0, 0, 803, 805, 1, 0, 0, 0, 804, 806, 3, 86, 37, 0, 805, 804, 1, 0, 0, 0, 806, 807, 1, 0, 0, 0, 807, 805, 1, 0, 0, 0, 807, 808, 1, 0, 0, 0, 808, 810, 1, 0, 0, 0, 809, 793, 1, 0, 0, 0, 809, 802, 1, 0, 0, 0, 810, 169, 1, 0, 0, 0, 811, 813, 3, 80, 34, 0, 812, 814, 3, 82, 35, 0, 813, 812, 1, 0, 0, 0, 814, 815, 1, 0, 0, 0, 815, 813, 1, 0, 0, 0, 815, 816, 1, 0, 0, 0, 816, 817, 1, 0, 0, 0, 817, 818, 3, 80, 34, 0, 818, 171, 1, 0, 0, 0, 819, 820, 3, 170, 79, 0, 820, 173, 1, 0, 0, 0, 821, 822, 3, 50, 19, 0, 822, 823, 1, 0, 0, 0, 823, 824, 6, 81, 9, 0, 824, 175, 1, 0, 0, 0, 825, 826, 3, 52, 20, 0, 826, 827, 1, 0, 0, 0, 827, 828, 6, 82, 9, 0, 828, 177, 1, 0, 0, 0, 829, 830, 3, 54, 21, 0, 830, 831, 1, 0, 0, 0, 831, 832, 6, 83, 9, 0, 832, 179, 1, 0, 0, 0, 833, 834, 3, 66, 27, 0, 834, 835, 1, 0, 0, 0, 
835, 836, 6, 84, 12, 0, 836, 837, 6, 84, 13, 0, 837, 181, 1, 0, 0, 0, 838, 839, 3, 164, 76, 0, 839, 840, 1, 0, 0, 0, 840, 841, 6, 85, 10, 0, 841, 183, 1, 0, 0, 0, 842, 843, 3, 166, 77, 0, 843, 844, 1, 0, 0, 0, 844, 845, 6, 86, 14, 0, 845, 185, 1, 0, 0, 0, 846, 847, 3, 104, 46, 0, 847, 848, 1, 0, 0, 0, 848, 849, 6, 87, 15, 0, 849, 187, 1, 0, 0, 0, 850, 851, 3, 100, 44, 0, 851, 852, 1, 0, 0, 0, 852, 853, 6, 88, 16, 0, 853, 189, 1, 0, 0, 0, 854, 855, 3, 88, 38, 0, 855, 856, 1, 0, 0, 0, 856, 857, 6, 89, 17, 0, 857, 191, 1, 0, 0, 0, 858, 859, 5, 111, 0, 0, 859, 860, 5, 112, 0, 0, 860, 861, 5, 116, 0, 0, 861, 862, 5, 105, 0, 0, 862, 863, 5, 111, 0, 0, 863, 864, 5, 110, 0, 0, 864, 865, 5, 115, 0, 0, 865, 193, 1, 0, 0, 0, 866, 867, 5, 109, 0, 0, 867, 868, 5, 101, 0, 0, 868, 869, 5, 116, 0, 0, 869, 870, 5, 97, 0, 0, 870, 871, 5, 100, 0, 0, 871, 872, 5, 97, 0, 0, 872, 873, 5, 116, 0, 0, 873, 874, 5, 97, 0, 0, 874, 195, 1, 0, 0, 0, 875, 879, 8, 10, 0, 0, 876, 877, 5, 47, 0, 0, 877, 879, 8, 11, 0, 0, 878, 875, 1, 0, 0, 0, 878, 876, 1, 0, 0, 0, 879, 197, 1, 0, 0, 0, 880, 882, 3, 196, 92, 0, 881, 880, 1, 0, 0, 0, 882, 883, 1, 0, 0, 0, 883, 881, 1, 0, 0, 0, 883, 884, 1, 0, 0, 0, 884, 199, 1, 0, 0, 0, 885, 886, 3, 50, 19, 0, 886, 887, 1, 0, 0, 0, 887, 888, 6, 94, 9, 0, 888, 201, 1, 0, 0, 0, 889, 890, 3, 52, 20, 0, 890, 891, 1, 0, 0, 0, 891, 892, 6, 95, 9, 0, 892, 203, 1, 0, 0, 0, 893, 894, 3, 54, 21, 0, 894, 895, 1, 0, 0, 0, 895, 896, 6, 96, 9, 0, 896, 205, 1, 0, 0, 0, 897, 898, 3, 66, 27, 0, 898, 899, 1, 0, 0, 0, 899, 900, 6, 97, 12, 0, 900, 901, 6, 97, 13, 0, 901, 207, 1, 0, 0, 0, 902, 903, 3, 108, 48, 0, 903, 904, 1, 0, 0, 0, 904, 905, 6, 98, 18, 0, 905, 209, 1, 0, 0, 0, 906, 907, 3, 104, 46, 0, 907, 908, 1, 0, 0, 0, 908, 909, 6, 99, 15, 0, 909, 211, 1, 0, 0, 0, 910, 915, 3, 70, 29, 0, 911, 915, 3, 68, 28, 0, 912, 915, 3, 84, 36, 0, 913, 915, 3, 158, 73, 0, 914, 910, 1, 0, 0, 0, 914, 911, 1, 0, 0, 0, 914, 912, 1, 0, 0, 0, 914, 913, 1, 0, 0, 0, 915, 213, 1, 0, 0, 0, 916, 919, 3, 
70, 29, 0, 917, 919, 3, 158, 73, 0, 918, 916, 1, 0, 0, 0, 918, 917, 1, 0, 0, 0, 919, 923, 1, 0, 0, 0, 920, 922, 3, 212, 100, 0, 921, 920, 1, 0, 0, 0, 922, 925, 1, 0, 0, 0, 923, 921, 1, 0, 0, 0, 923, 924, 1, 0, 0, 0, 924, 936, 1, 0, 0, 0, 925, 923, 1, 0, 0, 0, 926, 929, 3, 84, 36, 0, 927, 929, 3, 78, 33, 0, 928, 926, 1, 0, 0, 0, 928, 927, 1, 0, 0, 0, 929, 931, 1, 0, 0, 0, 930, 932, 3, 212, 100, 0, 931, 930, 1, 0, 0, 0, 932, 933, 1, 0, 0, 0, 933, 931, 1, 0, 0, 0, 933, 934, 1, 0, 0, 0, 934, 936, 1, 0, 0, 0, 935, 918, 1, 0, 0, 0, 935, 928, 1, 0, 0, 0, 936, 215, 1, 0, 0, 0, 937, 940, 3, 214, 101, 0, 938, 940, 3, 170, 79, 0, 939, 937, 1, 0, 0, 0, 939, 938, 1, 0, 0, 0, 940, 941, 1, 0, 0, 0, 941, 939, 1, 0, 0, 0, 941, 942, 1, 0, 0, 0, 942, 217, 1, 0, 0, 0, 943, 944, 3, 50, 19, 0, 944, 945, 1, 0, 0, 0, 945, 946, 6, 103, 9, 0, 946, 219, 1, 0, 0, 0, 947, 948, 3, 52, 20, 0, 948, 949, 1, 0, 0, 0, 949, 950, 6, 104, 9, 0, 950, 221, 1, 0, 0, 0, 951, 952, 3, 54, 21, 0, 952, 953, 1, 0, 0, 0, 953, 954, 6, 105, 9, 0, 954, 223, 1, 0, 0, 0, 955, 956, 3, 66, 27, 0, 956, 957, 1, 0, 0, 0, 957, 958, 6, 106, 12, 0, 958, 959, 6, 106, 13, 0, 959, 225, 1, 0, 0, 0, 960, 961, 3, 100, 44, 0, 961, 962, 1, 0, 0, 0, 962, 963, 6, 107, 16, 0, 963, 227, 1, 0, 0, 0, 964, 965, 3, 104, 46, 0, 965, 966, 1, 0, 0, 0, 966, 967, 6, 108, 15, 0, 967, 229, 1, 0, 0, 0, 968, 969, 3, 108, 48, 0, 969, 970, 1, 0, 0, 0, 970, 971, 6, 109, 18, 0, 971, 231, 1, 0, 0, 0, 972, 973, 5, 97, 0, 0, 973, 974, 5, 115, 0, 0, 974, 233, 1, 0, 0, 0, 975, 976, 3, 216, 102, 0, 976, 977, 1, 0, 0, 0, 977, 978, 6, 111, 19, 0, 978, 235, 1, 0, 0, 0, 979, 980, 3, 50, 19, 0, 980, 981, 1, 0, 0, 0, 981, 982, 6, 112, 9, 0, 982, 237, 1, 0, 0, 0, 983, 984, 3, 52, 20, 0, 984, 985, 1, 0, 0, 0, 985, 986, 6, 113, 9, 0, 986, 239, 1, 0, 0, 0, 987, 988, 3, 54, 21, 0, 988, 989, 1, 0, 0, 0, 989, 990, 6, 114, 9, 0, 990, 241, 1, 0, 0, 0, 991, 992, 3, 66, 27, 0, 992, 993, 1, 0, 0, 0, 993, 994, 6, 115, 12, 0, 994, 995, 6, 115, 13, 0, 995, 243, 1, 0, 0, 0, 996, 
997, 3, 164, 76, 0, 997, 998, 1, 0, 0, 0, 998, 999, 6, 116, 10, 0, 999, 1000, 6, 116, 20, 0, 1000, 245, 1, 0, 0, 0, 1001, 1002, 5, 111, 0, 0, 1002, 1003, 5, 110, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 117, 21, 0, 1005, 247, 1, 0, 0, 0, 1006, 1007, 5, 119, 0, 0, 1007, 1008, 5, 105, 0, 0, 1008, 1009, 5, 116, 0, 0, 1009, 1010, 5, 104, 0, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1012, 6, 118, 21, 0, 1012, 249, 1, 0, 0, 0, 1013, 1014, 8, 12, 0, 0, 1014, 251, 1, 0, 0, 0, 1015, 1017, 3, 250, 119, 0, 1016, 1015, 1, 0, 0, 0, 1017, 1018, 1, 0, 0, 0, 1018, 1016, 1, 0, 0, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 3, 320, 154, 0, 1021, 1023, 1, 0, 0, 0, 1022, 1016, 1, 0, 0, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1025, 1, 0, 0, 0, 1024, 1026, 3, 250, 119, 0, 1025, 1024, 1, 0, 0, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1025, 1, 0, 0, 0, 1027, 1028, 1, 0, 0, 0, 1028, 253, 1, 0, 0, 0, 1029, 1030, 3, 172, 80, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 121, 22, 0, 1032, 255, 1, 0, 0, 0, 1033, 1034, 3, 252, 120, 0, 1034, 1035, 1, 0, 0, 0, 1035, 1036, 6, 122, 23, 0, 1036, 257, 1, 0, 0, 0, 1037, 1038, 3, 50, 19, 0, 1038, 1039, 1, 0, 0, 0, 1039, 1040, 6, 123, 9, 0, 1040, 259, 1, 0, 0, 0, 1041, 1042, 3, 52, 20, 0, 1042, 1043, 1, 0, 0, 0, 1043, 1044, 6, 124, 9, 0, 1044, 261, 1, 0, 0, 0, 1045, 1046, 3, 54, 21, 0, 1046, 1047, 1, 0, 0, 0, 1047, 1048, 6, 125, 9, 0, 1048, 263, 1, 0, 0, 0, 1049, 1050, 3, 66, 27, 0, 1050, 1051, 1, 0, 0, 0, 1051, 1052, 6, 126, 12, 0, 1052, 1053, 6, 126, 13, 0, 1053, 1054, 6, 126, 13, 0, 1054, 265, 1, 0, 0, 0, 1055, 1056, 3, 100, 44, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 127, 16, 0, 1058, 267, 1, 0, 0, 0, 1059, 1060, 3, 104, 46, 0, 1060, 1061, 1, 0, 0, 0, 1061, 1062, 6, 128, 15, 0, 1062, 269, 1, 0, 0, 0, 1063, 1064, 3, 108, 48, 0, 1064, 1065, 1, 0, 0, 0, 1065, 1066, 6, 129, 18, 0, 1066, 271, 1, 0, 0, 0, 1067, 1068, 3, 248, 118, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1070, 6, 130, 24, 0, 1070, 273, 1, 0, 0, 0, 1071, 1072, 3, 216, 102, 0, 1072, 1073, 1, 0, 0, 0, 
1073, 1074, 6, 131, 19, 0, 1074, 275, 1, 0, 0, 0, 1075, 1076, 3, 172, 80, 0, 1076, 1077, 1, 0, 0, 0, 1077, 1078, 6, 132, 22, 0, 1078, 277, 1, 0, 0, 0, 1079, 1080, 3, 50, 19, 0, 1080, 1081, 1, 0, 0, 0, 1081, 1082, 6, 133, 9, 0, 1082, 279, 1, 0, 0, 0, 1083, 1084, 3, 52, 20, 0, 1084, 1085, 1, 0, 0, 0, 1085, 1086, 6, 134, 9, 0, 1086, 281, 1, 0, 0, 0, 1087, 1088, 3, 54, 21, 0, 1088, 1089, 1, 0, 0, 0, 1089, 1090, 6, 135, 9, 0, 1090, 283, 1, 0, 0, 0, 1091, 1092, 3, 66, 27, 0, 1092, 1093, 1, 0, 0, 0, 1093, 1094, 6, 136, 12, 0, 1094, 1095, 6, 136, 13, 0, 1095, 285, 1, 0, 0, 0, 1096, 1097, 3, 108, 48, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 137, 18, 0, 1099, 287, 1, 0, 0, 0, 1100, 1101, 3, 172, 80, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 138, 22, 0, 1103, 289, 1, 0, 0, 0, 1104, 1105, 3, 168, 78, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1107, 6, 139, 25, 0, 1107, 291, 1, 0, 0, 0, 1108, 1109, 3, 50, 19, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 140, 9, 0, 1111, 293, 1, 0, 0, 0, 1112, 1113, 3, 52, 20, 0, 1113, 1114, 1, 0, 0, 0, 1114, 1115, 6, 141, 9, 0, 1115, 295, 1, 0, 0, 0, 1116, 1117, 3, 54, 21, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 142, 9, 0, 1119, 297, 1, 0, 0, 0, 1120, 1121, 3, 66, 27, 0, 1121, 1122, 1, 0, 0, 0, 1122, 1123, 6, 143, 12, 0, 1123, 1124, 6, 143, 13, 0, 1124, 299, 1, 0, 0, 0, 1125, 1126, 5, 105, 0, 0, 1126, 1127, 5, 110, 0, 0, 1127, 1128, 5, 102, 0, 0, 1128, 1129, 5, 111, 0, 0, 1129, 301, 1, 0, 0, 0, 1130, 1131, 3, 50, 19, 0, 1131, 1132, 1, 0, 0, 0, 1132, 1133, 6, 145, 9, 0, 1133, 303, 1, 0, 0, 0, 1134, 1135, 3, 52, 20, 0, 1135, 1136, 1, 0, 0, 0, 1136, 1137, 6, 146, 9, 0, 1137, 305, 1, 0, 0, 0, 1138, 1139, 3, 54, 21, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1141, 6, 147, 9, 0, 1141, 307, 1, 0, 0, 0, 1142, 1143, 3, 66, 27, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 148, 12, 0, 1145, 1146, 6, 148, 13, 0, 1146, 309, 1, 0, 0, 0, 1147, 1148, 5, 102, 0, 0, 1148, 1149, 5, 117, 0, 0, 1149, 1150, 5, 110, 0, 0, 1150, 1151, 5, 99, 0, 0, 1151, 1152, 5, 116, 0, 0, 1152, 1153, 5, 
105, 0, 0, 1153, 1154, 5, 111, 0, 0, 1154, 1155, 5, 110, 0, 0, 1155, 1156, 5, 115, 0, 0, 1156, 311, 1, 0, 0, 0, 1157, 1158, 3, 50, 19, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1160, 6, 150, 9, 0, 1160, 313, 1, 0, 0, 0, 1161, 1162, 3, 52, 20, 0, 1162, 1163, 1, 0, 0, 0, 1163, 1164, 6, 151, 9, 0, 1164, 315, 1, 0, 0, 0, 1165, 1166, 3, 54, 21, 0, 1166, 1167, 1, 0, 0, 0, 1167, 1168, 6, 152, 9, 0, 1168, 317, 1, 0, 0, 0, 1169, 1170, 3, 166, 77, 0, 1170, 1171, 1, 0, 0, 0, 1171, 1172, 6, 153, 14, 0, 1172, 1173, 6, 153, 13, 0, 1173, 319, 1, 0, 0, 0, 1174, 1175, 5, 58, 0, 0, 1175, 321, 1, 0, 0, 0, 1176, 1182, 3, 78, 33, 0, 1177, 1182, 3, 68, 28, 0, 1178, 1182, 3, 108, 48, 0, 1179, 1182, 3, 70, 29, 0, 1180, 1182, 3, 84, 36, 0, 1181, 1176, 1, 0, 0, 0, 1181, 1177, 1, 0, 0, 0, 1181, 1178, 1, 0, 0, 0, 1181, 1179, 1, 0, 0, 0, 1181, 1180, 1, 0, 0, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1181, 1, 0, 0, 0, 1183, 1184, 1, 0, 0, 0, 1184, 323, 1, 0, 0, 0, 1185, 1186, 3, 50, 19, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 156, 9, 0, 1188, 325, 1, 0, 0, 0, 1189, 1190, 3, 52, 20, 0, 1190, 1191, 1, 0, 0, 0, 1191, 1192, 6, 157, 9, 0, 1192, 327, 1, 0, 0, 0, 1193, 1194, 3, 54, 21, 0, 1194, 1195, 1, 0, 0, 0, 1195, 1196, 6, 158, 9, 0, 1196, 329, 1, 0, 0, 0, 58, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 483, 493, 497, 500, 509, 511, 522, 563, 568, 577, 584, 589, 591, 602, 610, 613, 615, 620, 625, 631, 638, 643, 649, 652, 660, 664, 797, 802, 807, 809, 815, 878, 883, 914, 918, 923, 928, 933, 935, 939, 941, 1018, 1022, 1027, 1181, 1183, 26, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 65, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 66, 0, 7, 35, 0, 7, 33, 0, 7, 27, 0, 7, 37, 0, 7, 78, 0, 5, 11, 0, 5, 7, 0, 7, 68, 0, 7, 88, 0, 7, 87, 0, 7, 67, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 
ac3354d0aa90..75fa8061fa48 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -69,11 +69,11 @@ private static String[] makeRuleNames() { "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COMMA", "FROM_ASSIGN", "FROM_QUOTED_STRING", "OPTIONS", "METADATA", - "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", "FROM_QUOTED_IDENTIFIER", - "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", - "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", - "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", - "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "AS", "RENAME_ID_PATTERN", + "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", + "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", + "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "ID_PATTERN", + "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "RENAME_PIPE", + "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "AS", "RENAME_ID_PATTERN", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", "ENRICH_QUOTED_IDENTIFIER", "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", @@ -191,7 +191,7 @@ public EsqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\u0004\u0000n\u04b3\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000n\u04ad\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ 
"\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ @@ -234,720 +234,716 @@ public EsqlBaseLexer(CharStream input) { "\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098"+ "\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b"+ "\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007\u009e"+ - "\u0002\u009f\u0007\u009f\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004"+ + "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007"+ + "\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+ "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ 
- "\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001"+ "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0012\u0004\u0012\u01e4\b\u0012\u000b\u0012\f"+ - "\u0012\u01e5\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013"+ - "\u0001\u0013\u0005\u0013\u01ee\b\u0013\n\u0013\f\u0013\u01f1\t\u0013\u0001"+ - "\u0013\u0003\u0013\u01f4\b\u0013\u0001\u0013\u0003\u0013\u01f7\b\u0013"+ - 
"\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014"+ - "\u0001\u0014\u0005\u0014\u0200\b\u0014\n\u0014\f\u0014\u0203\t\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004"+ - "\u0015\u020b\b\u0015\u000b\u0015\f\u0015\u020c\u0001\u0015\u0001\u0015"+ - "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017"+ - "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018"+ - "\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b"+ - "\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d"+ - "\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001"+ - " \u0003 \u0236\b \u0001 \u0004 \u0239\b \u000b \f \u023a\u0001!\u0001"+ - "!\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u0244\b#\u0001$\u0001$\u0001"+ - "%\u0001%\u0001%\u0003%\u024b\b%\u0001&\u0001&\u0001&\u0005&\u0250\b&\n"+ - "&\f&\u0253\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005&\u025b\b"+ - "&\n&\f&\u025e\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u0265\b&\u0001"+ - "&\u0003&\u0268\b&\u0003&\u026a\b&\u0001\'\u0004\'\u026d\b\'\u000b\'\f"+ - "\'\u026e\u0001(\u0004(\u0272\b(\u000b(\f(\u0273\u0001(\u0001(\u0005(\u0278"+ - "\b(\n(\f(\u027b\t(\u0001(\u0001(\u0004(\u027f\b(\u000b(\f(\u0280\u0001"+ - "(\u0004(\u0284\b(\u000b(\f(\u0285\u0001(\u0001(\u0005(\u028a\b(\n(\f("+ - "\u028d\t(\u0003(\u028f\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u0295\b("+ - "\u000b(\f(\u0296\u0001(\u0001(\u0003(\u029b\b(\u0001)\u0001)\u0001)\u0001"+ - "*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ - "-\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001/\u0001"+ - "0\u00010\u00011\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u0001"+ - "2\u00012\u00012\u00012\u00013\u00013\u00013\u00013\u00013\u00014\u0001"+ - 
"4\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u00017\u0001"+ - "7\u00017\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u00019\u0001"+ - "9\u0001:\u0001:\u0001:\u0001:\u0001:\u0001:\u0001;\u0001;\u0001;\u0001"+ - "<\u0001<\u0001=\u0001=\u0001=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001"+ - "?\u0001?\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001"+ - "A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001"+ - "E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001"+ - "J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001"+ - "M\u0001M\u0001M\u0001M\u0001N\u0001N\u0005N\u031e\bN\nN\fN\u0321\tN\u0001"+ - "N\u0001N\u0003N\u0325\bN\u0001N\u0004N\u0328\bN\u000bN\fN\u0329\u0003"+ - "N\u032c\bN\u0001O\u0001O\u0004O\u0330\bO\u000bO\fO\u0331\u0001O\u0001"+ - "O\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001"+ - "R\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001"+ - "U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001"+ - "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ - "Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001"+ - "[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0003"+ - "\\\u0371\b\\\u0001]\u0004]\u0374\b]\u000b]\f]\u0375\u0001^\u0001^\u0001"+ - "^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ - "a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ - "c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001"+ - "e\u0003e\u0399\be\u0001f\u0001f\u0003f\u039d\bf\u0001f\u0005f\u03a0\b"+ - "f\nf\ff\u03a3\tf\u0001f\u0001f\u0003f\u03a7\bf\u0001f\u0004f\u03aa\bf"+ - "\u000bf\ff\u03ab\u0003f\u03ae\bf\u0001g\u0001g\u0004g\u03b2\bg\u000bg"+ - "\fg\u03b3\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001"+ - "j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001k\u0001l\u0001"+ - 
"l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001"+ - "n\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001"+ - "q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001"+ - "t\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001"+ - "v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001w\u0001"+ - "w\u0001w\u0001x\u0001x\u0001y\u0004y\u03ff\by\u000by\fy\u0400\u0001y\u0001"+ - "y\u0003y\u0405\by\u0001y\u0004y\u0408\by\u000by\fy\u0409\u0001z\u0001"+ - "z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001"+ - "|\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001\u007f"+ - "\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080"+ - "\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001\u0081"+ - "\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083"+ - "\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084"+ - "\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086"+ - "\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087"+ - "\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089"+ - "\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a"+ - "\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b"+ - "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d"+ - "\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e"+ - "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090"+ - "\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091"+ - "\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092"+ - "\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094"+ - "\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ - 
"\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096"+ - "\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097"+ - "\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098"+ - "\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a"+ - "\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b"+ - "\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0004\u009c"+ - "\u04a4\b\u009c\u000b\u009c\f\u009c\u04a5\u0001\u009d\u0001\u009d\u0001"+ - "\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ - "\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0002\u0201\u025c\u0000\u00a0"+ - "\f\u0001\u000e\u0002\u0010\u0003\u0012\u0004\u0014\u0005\u0016\u0006\u0018"+ - "\u0007\u001a\b\u001c\t\u001e\n \u000b\"\f$\r&\u000e(\u000f*\u0010,\u0011"+ - ".\u00120\u00132\u00144\u00156\u00168\u0000:\u0000<\u0017>\u0018@\u0019"+ - "B\u001aD\u0000F\u0000H\u0000J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000"+ - "V\u0000X\u001bZ\u001c\\\u001d^\u001e`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+"+ - "z,|-~.\u0080/\u00820\u00841\u00862\u00883\u008a4\u008c5\u008e6\u00907"+ - "\u00928\u00949\u0096:\u0098;\u009a<\u009c=\u009e>\u00a0?\u00a2@\u00a4"+ - "A\u00a6B\u00a8C\u00aa\u0000\u00acD\u00aeE\u00b0F\u00b2G\u00b4\u0000\u00b6"+ - "\u0000\u00b8\u0000\u00ba\u0000\u00bc\u0000\u00be\u0000\u00c0H\u00c2I\u00c4"+ - "\u0000\u00c6J\u00c8\u0000\u00caK\u00ccL\u00ceM\u00d0\u0000\u00d2\u0000"+ - "\u00d4\u0000\u00d6\u0000\u00d8\u0000\u00daN\u00dcO\u00deP\u00e0Q\u00e2"+ - "\u0000\u00e4\u0000\u00e6\u0000\u00e8\u0000\u00eaR\u00ec\u0000\u00eeS\u00f0"+ - "T\u00f2U\u00f4\u0000\u00f6\u0000\u00f8V\u00faW\u00fc\u0000\u00feX\u0100"+ - "\u0000\u0102\u0000\u0104Y\u0106Z\u0108[\u010a\u0000\u010c\u0000\u010e"+ - "\u0000\u0110\u0000\u0112\u0000\u0114\u0000\u0116\u0000\u0118\\\u011a]"+ - "\u011c^\u011e\u0000\u0120\u0000\u0122\u0000\u0124\u0000\u0126_\u0128`"+ - 
"\u012aa\u012c\u0000\u012eb\u0130c\u0132d\u0134e\u0136\u0000\u0138f\u013a"+ - "g\u013ch\u013ei\u0140\u0000\u0142j\u0144k\u0146l\u0148m\u014an\f\u0000"+ - "\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\r\u0006\u0000\t"+ - "\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u00000"+ - "9\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\"+ - "\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\n\u0000\t\n\r\r ,,/"+ - "/==[[]]``||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u04ce"+ - "\u0000\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001\u0000\u0000\u0000\u0000"+ - "\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000"+ - "\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000"+ - "\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000"+ - "\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000"+ - " \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001"+ - "\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000"+ - "\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000"+ - ".\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001"+ - "\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000"+ - "\u0000\u00018\u0001\u0000\u0000\u0000\u0001:\u0001\u0000\u0000\u0000\u0001"+ - "<\u0001\u0000\u0000\u0000\u0001>\u0001\u0000\u0000\u0000\u0001@\u0001"+ - "\u0000\u0000\u0000\u0002B\u0001\u0000\u0000\u0000\u0002X\u0001\u0000\u0000"+ - "\u0000\u0002Z\u0001\u0000\u0000\u0000\u0002\\\u0001\u0000\u0000\u0000"+ - "\u0002^\u0001\u0000\u0000\u0000\u0002`\u0001\u0000\u0000\u0000\u0002b"+ - "\u0001\u0000\u0000\u0000\u0002d\u0001\u0000\u0000\u0000\u0002f\u0001\u0000"+ - "\u0000\u0000\u0002h\u0001\u0000\u0000\u0000\u0002j\u0001\u0000\u0000\u0000"+ - "\u0002l\u0001\u0000\u0000\u0000\u0002n\u0001\u0000\u0000\u0000\u0002p"+ - 
"\u0001\u0000\u0000\u0000\u0002r\u0001\u0000\u0000\u0000\u0002t\u0001\u0000"+ - "\u0000\u0000\u0002v\u0001\u0000\u0000\u0000\u0002x\u0001\u0000\u0000\u0000"+ - "\u0002z\u0001\u0000\u0000\u0000\u0002|\u0001\u0000\u0000\u0000\u0002~"+ - "\u0001\u0000\u0000\u0000\u0002\u0080\u0001\u0000\u0000\u0000\u0002\u0082"+ - "\u0001\u0000\u0000\u0000\u0002\u0084\u0001\u0000\u0000\u0000\u0002\u0086"+ - "\u0001\u0000\u0000\u0000\u0002\u0088\u0001\u0000\u0000\u0000\u0002\u008a"+ - "\u0001\u0000\u0000\u0000\u0002\u008c\u0001\u0000\u0000\u0000\u0002\u008e"+ - "\u0001\u0000\u0000\u0000\u0002\u0090\u0001\u0000\u0000\u0000\u0002\u0092"+ - "\u0001\u0000\u0000\u0000\u0002\u0094\u0001\u0000\u0000\u0000\u0002\u0096"+ - "\u0001\u0000\u0000\u0000\u0002\u0098\u0001\u0000\u0000\u0000\u0002\u009a"+ - "\u0001\u0000\u0000\u0000\u0002\u009c\u0001\u0000\u0000\u0000\u0002\u009e"+ - "\u0001\u0000\u0000\u0000\u0002\u00a0\u0001\u0000\u0000\u0000\u0002\u00a2"+ - "\u0001\u0000\u0000\u0000\u0002\u00a4\u0001\u0000\u0000\u0000\u0002\u00a6"+ - "\u0001\u0000\u0000\u0000\u0002\u00a8\u0001\u0000\u0000\u0000\u0002\u00ac"+ - "\u0001\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0"+ - "\u0001\u0000\u0000\u0000\u0002\u00b2\u0001\u0000\u0000\u0000\u0003\u00b4"+ - "\u0001\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000\u0000\u0003\u00b8"+ - "\u0001\u0000\u0000\u0000\u0003\u00ba\u0001\u0000\u0000\u0000\u0003\u00bc"+ - "\u0001\u0000\u0000\u0000\u0003\u00be\u0001\u0000\u0000\u0000\u0003\u00c0"+ - "\u0001\u0000\u0000\u0000\u0003\u00c2\u0001\u0000\u0000\u0000\u0003\u00c6"+ - "\u0001\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca"+ - "\u0001\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce"+ - "\u0001\u0000\u0000\u0000\u0004\u00d0\u0001\u0000\u0000\u0000\u0004\u00d2"+ - "\u0001\u0000\u0000\u0000\u0004\u00d4\u0001\u0000\u0000\u0000\u0004\u00da"+ - "\u0001\u0000\u0000\u0000\u0004\u00dc\u0001\u0000\u0000\u0000\u0004\u00de"+ - 
"\u0001\u0000\u0000\u0000\u0004\u00e0\u0001\u0000\u0000\u0000\u0005\u00e2"+ - "\u0001\u0000\u0000\u0000\u0005\u00e4\u0001\u0000\u0000\u0000\u0005\u00e6"+ - "\u0001\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000\u0000\u0005\u00ea"+ - "\u0001\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0005\u00ee"+ - "\u0001\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000\u0000\u0005\u00f2"+ - "\u0001\u0000\u0000\u0000\u0006\u00f4\u0001\u0000\u0000\u0000\u0006\u00f6"+ - "\u0001\u0000\u0000\u0000\u0006\u00f8\u0001\u0000\u0000\u0000\u0006\u00fa"+ - "\u0001\u0000\u0000\u0000\u0006\u00fe\u0001\u0000\u0000\u0000\u0006\u0100"+ - "\u0001\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000\u0000\u0006\u0104"+ - "\u0001\u0000\u0000\u0000\u0006\u0106\u0001\u0000\u0000\u0000\u0006\u0108"+ - "\u0001\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000\u0000\u0007\u010c"+ - "\u0001\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000\u0000\u0007\u0110"+ - "\u0001\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114"+ - "\u0001\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\u0007\u0118"+ - "\u0001\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000\u0000\u0007\u011c"+ - "\u0001\u0000\u0000\u0000\b\u011e\u0001\u0000\u0000\u0000\b\u0120\u0001"+ - "\u0000\u0000\u0000\b\u0122\u0001\u0000\u0000\u0000\b\u0124\u0001\u0000"+ - "\u0000\u0000\b\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001\u0000\u0000"+ - "\u0000\b\u012a\u0001\u0000\u0000\u0000\t\u012c\u0001\u0000\u0000\u0000"+ - "\t\u012e\u0001\u0000\u0000\u0000\t\u0130\u0001\u0000\u0000\u0000\t\u0132"+ - "\u0001\u0000\u0000\u0000\t\u0134\u0001\u0000\u0000\u0000\n\u0136\u0001"+ - "\u0000\u0000\u0000\n\u0138\u0001\u0000\u0000\u0000\n\u013a\u0001\u0000"+ - "\u0000\u0000\n\u013c\u0001\u0000\u0000\u0000\n\u013e\u0001\u0000\u0000"+ - "\u0000\u000b\u0140\u0001\u0000\u0000\u0000\u000b\u0142\u0001\u0000\u0000"+ - "\u0000\u000b\u0144\u0001\u0000\u0000\u0000\u000b\u0146\u0001\u0000\u0000"+ - 
"\u0000\u000b\u0148\u0001\u0000\u0000\u0000\u000b\u014a\u0001\u0000\u0000"+ - "\u0000\f\u014c\u0001\u0000\u0000\u0000\u000e\u0156\u0001\u0000\u0000\u0000"+ - "\u0010\u015d\u0001\u0000\u0000\u0000\u0012\u0166\u0001\u0000\u0000\u0000"+ - "\u0014\u016d\u0001\u0000\u0000\u0000\u0016\u0177\u0001\u0000\u0000\u0000"+ - "\u0018\u017e\u0001\u0000\u0000\u0000\u001a\u0185\u0001\u0000\u0000\u0000"+ - "\u001c\u0193\u0001\u0000\u0000\u0000\u001e\u019a\u0001\u0000\u0000\u0000"+ - " \u01a2\u0001\u0000\u0000\u0000\"\u01a9\u0001\u0000\u0000\u0000$\u01b5"+ - "\u0001\u0000\u0000\u0000&\u01be\u0001\u0000\u0000\u0000(\u01c4\u0001\u0000"+ - "\u0000\u0000*\u01cb\u0001\u0000\u0000\u0000,\u01d2\u0001\u0000\u0000\u0000"+ - ".\u01da\u0001\u0000\u0000\u00000\u01e3\u0001\u0000\u0000\u00002\u01e9"+ - "\u0001\u0000\u0000\u00004\u01fa\u0001\u0000\u0000\u00006\u020a\u0001\u0000"+ - "\u0000\u00008\u0210\u0001\u0000\u0000\u0000:\u0215\u0001\u0000\u0000\u0000"+ - "<\u021a\u0001\u0000\u0000\u0000>\u021e\u0001\u0000\u0000\u0000@\u0222"+ - "\u0001\u0000\u0000\u0000B\u0226\u0001\u0000\u0000\u0000D\u022a\u0001\u0000"+ - "\u0000\u0000F\u022c\u0001\u0000\u0000\u0000H\u022e\u0001\u0000\u0000\u0000"+ - "J\u0231\u0001\u0000\u0000\u0000L\u0233\u0001\u0000\u0000\u0000N\u023c"+ - "\u0001\u0000\u0000\u0000P\u023e\u0001\u0000\u0000\u0000R\u0243\u0001\u0000"+ - "\u0000\u0000T\u0245\u0001\u0000\u0000\u0000V\u024a\u0001\u0000\u0000\u0000"+ - "X\u0269\u0001\u0000\u0000\u0000Z\u026c\u0001\u0000\u0000\u0000\\\u029a"+ - "\u0001\u0000\u0000\u0000^\u029c\u0001\u0000\u0000\u0000`\u029f\u0001\u0000"+ - "\u0000\u0000b\u02a3\u0001\u0000\u0000\u0000d\u02a7\u0001\u0000\u0000\u0000"+ - "f\u02a9\u0001\u0000\u0000\u0000h\u02ac\u0001\u0000\u0000\u0000j\u02ae"+ - "\u0001\u0000\u0000\u0000l\u02b3\u0001\u0000\u0000\u0000n\u02b5\u0001\u0000"+ - "\u0000\u0000p\u02bb\u0001\u0000\u0000\u0000r\u02c1\u0001\u0000\u0000\u0000"+ - "t\u02c6\u0001\u0000\u0000\u0000v\u02c8\u0001\u0000\u0000\u0000x\u02cb"+ - 
"\u0001\u0000\u0000\u0000z\u02ce\u0001\u0000\u0000\u0000|\u02d3\u0001\u0000"+ - "\u0000\u0000~\u02d7\u0001\u0000\u0000\u0000\u0080\u02dc\u0001\u0000\u0000"+ - "\u0000\u0082\u02e2\u0001\u0000\u0000\u0000\u0084\u02e5\u0001\u0000\u0000"+ - "\u0000\u0086\u02e7\u0001\u0000\u0000\u0000\u0088\u02ed\u0001\u0000\u0000"+ - "\u0000\u008a\u02ef\u0001\u0000\u0000\u0000\u008c\u02f4\u0001\u0000\u0000"+ - "\u0000\u008e\u02f7\u0001\u0000\u0000\u0000\u0090\u02fa\u0001\u0000\u0000"+ - "\u0000\u0092\u02fd\u0001\u0000\u0000\u0000\u0094\u02ff\u0001\u0000\u0000"+ - "\u0000\u0096\u0302\u0001\u0000\u0000\u0000\u0098\u0304\u0001\u0000\u0000"+ - "\u0000\u009a\u0307\u0001\u0000\u0000\u0000\u009c\u0309\u0001\u0000\u0000"+ - "\u0000\u009e\u030b\u0001\u0000\u0000\u0000\u00a0\u030d\u0001\u0000\u0000"+ - "\u0000\u00a2\u030f\u0001\u0000\u0000\u0000\u00a4\u0311\u0001\u0000\u0000"+ - "\u0000\u00a6\u0316\u0001\u0000\u0000\u0000\u00a8\u032b\u0001\u0000\u0000"+ - "\u0000\u00aa\u032d\u0001\u0000\u0000\u0000\u00ac\u0335\u0001\u0000\u0000"+ - "\u0000\u00ae\u0337\u0001\u0000\u0000\u0000\u00b0\u033b\u0001\u0000\u0000"+ - "\u0000\u00b2\u033f\u0001\u0000\u0000\u0000\u00b4\u0343\u0001\u0000\u0000"+ - "\u0000\u00b6\u0348\u0001\u0000\u0000\u0000\u00b8\u034c\u0001\u0000\u0000"+ - "\u0000\u00ba\u0350\u0001\u0000\u0000\u0000\u00bc\u0354\u0001\u0000\u0000"+ - "\u0000\u00be\u0358\u0001\u0000\u0000\u0000\u00c0\u035c\u0001\u0000\u0000"+ - "\u0000\u00c2\u0364\u0001\u0000\u0000\u0000\u00c4\u0370\u0001\u0000\u0000"+ - "\u0000\u00c6\u0373\u0001\u0000\u0000\u0000\u00c8\u0377\u0001\u0000\u0000"+ - "\u0000\u00ca\u037b\u0001\u0000\u0000\u0000\u00cc\u037f\u0001\u0000\u0000"+ - "\u0000\u00ce\u0383\u0001\u0000\u0000\u0000\u00d0\u0387\u0001\u0000\u0000"+ - "\u0000\u00d2\u038c\u0001\u0000\u0000\u0000\u00d4\u0390\u0001\u0000\u0000"+ - "\u0000\u00d6\u0398\u0001\u0000\u0000\u0000\u00d8\u03ad\u0001\u0000\u0000"+ - "\u0000\u00da\u03b1\u0001\u0000\u0000\u0000\u00dc\u03b5\u0001\u0000\u0000"+ - 
"\u0000\u00de\u03b9\u0001\u0000\u0000\u0000\u00e0\u03bd\u0001\u0000\u0000"+ - "\u0000\u00e2\u03c1\u0001\u0000\u0000\u0000\u00e4\u03c6\u0001\u0000\u0000"+ - "\u0000\u00e6\u03ca\u0001\u0000\u0000\u0000\u00e8\u03ce\u0001\u0000\u0000"+ - "\u0000\u00ea\u03d2\u0001\u0000\u0000\u0000\u00ec\u03d5\u0001\u0000\u0000"+ - "\u0000\u00ee\u03d9\u0001\u0000\u0000\u0000\u00f0\u03dd\u0001\u0000\u0000"+ - "\u0000\u00f2\u03e1\u0001\u0000\u0000\u0000\u00f4\u03e5\u0001\u0000\u0000"+ - "\u0000\u00f6\u03ea\u0001\u0000\u0000\u0000\u00f8\u03ef\u0001\u0000\u0000"+ - "\u0000\u00fa\u03f4\u0001\u0000\u0000\u0000\u00fc\u03fb\u0001\u0000\u0000"+ - "\u0000\u00fe\u0404\u0001\u0000\u0000\u0000\u0100\u040b\u0001\u0000\u0000"+ - "\u0000\u0102\u040f\u0001\u0000\u0000\u0000\u0104\u0413\u0001\u0000\u0000"+ - "\u0000\u0106\u0417\u0001\u0000\u0000\u0000\u0108\u041b\u0001\u0000\u0000"+ - "\u0000\u010a\u041f\u0001\u0000\u0000\u0000\u010c\u0425\u0001\u0000\u0000"+ - "\u0000\u010e\u0429\u0001\u0000\u0000\u0000\u0110\u042d\u0001\u0000\u0000"+ - "\u0000\u0112\u0431\u0001\u0000\u0000\u0000\u0114\u0435\u0001\u0000\u0000"+ - "\u0000\u0116\u0439\u0001\u0000\u0000\u0000\u0118\u043d\u0001\u0000\u0000"+ - "\u0000\u011a\u0441\u0001\u0000\u0000\u0000\u011c\u0445\u0001\u0000\u0000"+ - "\u0000\u011e\u0449\u0001\u0000\u0000\u0000\u0120\u044e\u0001\u0000\u0000"+ - "\u0000\u0122\u0452\u0001\u0000\u0000\u0000\u0124\u0456\u0001\u0000\u0000"+ - "\u0000\u0126\u045a\u0001\u0000\u0000\u0000\u0128\u045e\u0001\u0000\u0000"+ - "\u0000\u012a\u0462\u0001\u0000\u0000\u0000\u012c\u0466\u0001\u0000\u0000"+ - "\u0000\u012e\u046b\u0001\u0000\u0000\u0000\u0130\u0470\u0001\u0000\u0000"+ - "\u0000\u0132\u0474\u0001\u0000\u0000\u0000\u0134\u0478\u0001\u0000\u0000"+ - "\u0000\u0136\u047c\u0001\u0000\u0000\u0000\u0138\u0481\u0001\u0000\u0000"+ - "\u0000\u013a\u048b\u0001\u0000\u0000\u0000\u013c\u048f\u0001\u0000\u0000"+ - "\u0000\u013e\u0493\u0001\u0000\u0000\u0000\u0140\u0497\u0001\u0000\u0000"+ - 
"\u0000\u0142\u049c\u0001\u0000\u0000\u0000\u0144\u04a3\u0001\u0000\u0000"+ - "\u0000\u0146\u04a7\u0001\u0000\u0000\u0000\u0148\u04ab\u0001\u0000\u0000"+ - "\u0000\u014a\u04af\u0001\u0000\u0000\u0000\u014c\u014d\u0005d\u0000\u0000"+ - "\u014d\u014e\u0005i\u0000\u0000\u014e\u014f\u0005s\u0000\u0000\u014f\u0150"+ - "\u0005s\u0000\u0000\u0150\u0151\u0005e\u0000\u0000\u0151\u0152\u0005c"+ - "\u0000\u0000\u0152\u0153\u0005t\u0000\u0000\u0153\u0154\u0001\u0000\u0000"+ - "\u0000\u0154\u0155\u0006\u0000\u0000\u0000\u0155\r\u0001\u0000\u0000\u0000"+ - "\u0156\u0157\u0005d\u0000\u0000\u0157\u0158\u0005r\u0000\u0000\u0158\u0159"+ - "\u0005o\u0000\u0000\u0159\u015a\u0005p\u0000\u0000\u015a\u015b\u0001\u0000"+ - "\u0000\u0000\u015b\u015c\u0006\u0001\u0001\u0000\u015c\u000f\u0001\u0000"+ - "\u0000\u0000\u015d\u015e\u0005e\u0000\u0000\u015e\u015f\u0005n\u0000\u0000"+ - "\u015f\u0160\u0005r\u0000\u0000\u0160\u0161\u0005i\u0000\u0000\u0161\u0162"+ - "\u0005c\u0000\u0000\u0162\u0163\u0005h\u0000\u0000\u0163\u0164\u0001\u0000"+ - "\u0000\u0000\u0164\u0165\u0006\u0002\u0002\u0000\u0165\u0011\u0001\u0000"+ - "\u0000\u0000\u0166\u0167\u0005e\u0000\u0000\u0167\u0168\u0005v\u0000\u0000"+ - "\u0168\u0169\u0005a\u0000\u0000\u0169\u016a\u0005l\u0000\u0000\u016a\u016b"+ - "\u0001\u0000\u0000\u0000\u016b\u016c\u0006\u0003\u0000\u0000\u016c\u0013"+ - "\u0001\u0000\u0000\u0000\u016d\u016e\u0005e\u0000\u0000\u016e\u016f\u0005"+ - "x\u0000\u0000\u016f\u0170\u0005p\u0000\u0000\u0170\u0171\u0005l\u0000"+ - "\u0000\u0171\u0172\u0005a\u0000\u0000\u0172\u0173\u0005i\u0000\u0000\u0173"+ - "\u0174\u0005n\u0000\u0000\u0174\u0175\u0001\u0000\u0000\u0000\u0175\u0176"+ - "\u0006\u0004\u0003\u0000\u0176\u0015\u0001\u0000\u0000\u0000\u0177\u0178"+ - "\u0005f\u0000\u0000\u0178\u0179\u0005r\u0000\u0000\u0179\u017a\u0005o"+ - "\u0000\u0000\u017a\u017b\u0005m\u0000\u0000\u017b\u017c\u0001\u0000\u0000"+ - "\u0000\u017c\u017d\u0006\u0005\u0004\u0000\u017d\u0017\u0001\u0000\u0000"+ - 
"\u0000\u017e\u017f\u0005g\u0000\u0000\u017f\u0180\u0005r\u0000\u0000\u0180"+ - "\u0181\u0005o\u0000\u0000\u0181\u0182\u0005k\u0000\u0000\u0182\u0183\u0001"+ - "\u0000\u0000\u0000\u0183\u0184\u0006\u0006\u0000\u0000\u0184\u0019\u0001"+ - "\u0000\u0000\u0000\u0185\u0186\u0005i\u0000\u0000\u0186\u0187\u0005n\u0000"+ - "\u0000\u0187\u0188\u0005l\u0000\u0000\u0188\u0189\u0005i\u0000\u0000\u0189"+ - "\u018a\u0005n\u0000\u0000\u018a\u018b\u0005e\u0000\u0000\u018b\u018c\u0005"+ - "s\u0000\u0000\u018c\u018d\u0005t\u0000\u0000\u018d\u018e\u0005a\u0000"+ - "\u0000\u018e\u018f\u0005t\u0000\u0000\u018f\u0190\u0005s\u0000\u0000\u0190"+ - "\u0191\u0001\u0000\u0000\u0000\u0191\u0192\u0006\u0007\u0000\u0000\u0192"+ - "\u001b\u0001\u0000\u0000\u0000\u0193\u0194\u0005k\u0000\u0000\u0194\u0195"+ - "\u0005e\u0000\u0000\u0195\u0196\u0005e\u0000\u0000\u0196\u0197\u0005p"+ - "\u0000\u0000\u0197\u0198\u0001\u0000\u0000\u0000\u0198\u0199\u0006\b\u0001"+ - "\u0000\u0199\u001d\u0001\u0000\u0000\u0000\u019a\u019b\u0005l\u0000\u0000"+ - "\u019b\u019c\u0005i\u0000\u0000\u019c\u019d\u0005m\u0000\u0000\u019d\u019e"+ - "\u0005i\u0000\u0000\u019e\u019f\u0005t\u0000\u0000\u019f\u01a0\u0001\u0000"+ - "\u0000\u0000\u01a0\u01a1\u0006\t\u0000\u0000\u01a1\u001f\u0001\u0000\u0000"+ - "\u0000\u01a2\u01a3\u0005m\u0000\u0000\u01a3\u01a4\u0005e\u0000\u0000\u01a4"+ - "\u01a5\u0005t\u0000\u0000\u01a5\u01a6\u0005a\u0000\u0000\u01a6\u01a7\u0001"+ - "\u0000\u0000\u0000\u01a7\u01a8\u0006\n\u0005\u0000\u01a8!\u0001\u0000"+ - "\u0000\u0000\u01a9\u01aa\u0005m\u0000\u0000\u01aa\u01ab\u0005v\u0000\u0000"+ - "\u01ab\u01ac\u0005_\u0000\u0000\u01ac\u01ad\u0005e\u0000\u0000\u01ad\u01ae"+ - "\u0005x\u0000\u0000\u01ae\u01af\u0005p\u0000\u0000\u01af\u01b0\u0005a"+ - "\u0000\u0000\u01b0\u01b1\u0005n\u0000\u0000\u01b1\u01b2\u0005d\u0000\u0000"+ - "\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3\u01b4\u0006\u000b\u0006\u0000"+ - "\u01b4#\u0001\u0000\u0000\u0000\u01b5\u01b6\u0005r\u0000\u0000\u01b6\u01b7"+ - 
"\u0005e\u0000\u0000\u01b7\u01b8\u0005n\u0000\u0000\u01b8\u01b9\u0005a"+ - "\u0000\u0000\u01b9\u01ba\u0005m\u0000\u0000\u01ba\u01bb\u0005e\u0000\u0000"+ - "\u01bb\u01bc\u0001\u0000\u0000\u0000\u01bc\u01bd\u0006\f\u0007\u0000\u01bd"+ - "%\u0001\u0000\u0000\u0000\u01be\u01bf\u0005r\u0000\u0000\u01bf\u01c0\u0005"+ - "o\u0000\u0000\u01c0\u01c1\u0005w\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000"+ - "\u0000\u01c2\u01c3\u0006\r\u0000\u0000\u01c3\'\u0001\u0000\u0000\u0000"+ - "\u01c4\u01c5\u0005s\u0000\u0000\u01c5\u01c6\u0005h\u0000\u0000\u01c6\u01c7"+ - "\u0005o\u0000\u0000\u01c7\u01c8\u0005w\u0000\u0000\u01c8\u01c9\u0001\u0000"+ - "\u0000\u0000\u01c9\u01ca\u0006\u000e\b\u0000\u01ca)\u0001\u0000\u0000"+ - "\u0000\u01cb\u01cc\u0005s\u0000\u0000\u01cc\u01cd\u0005o\u0000\u0000\u01cd"+ - "\u01ce\u0005r\u0000\u0000\u01ce\u01cf\u0005t\u0000\u0000\u01cf\u01d0\u0001"+ - "\u0000\u0000\u0000\u01d0\u01d1\u0006\u000f\u0000\u0000\u01d1+\u0001\u0000"+ - "\u0000\u0000\u01d2\u01d3\u0005s\u0000\u0000\u01d3\u01d4\u0005t\u0000\u0000"+ - "\u01d4\u01d5\u0005a\u0000\u0000\u01d5\u01d6\u0005t\u0000\u0000\u01d6\u01d7"+ - "\u0005s\u0000\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01d9\u0006"+ - "\u0010\u0000\u0000\u01d9-\u0001\u0000\u0000\u0000\u01da\u01db\u0005w\u0000"+ - "\u0000\u01db\u01dc\u0005h\u0000\u0000\u01dc\u01dd\u0005e\u0000\u0000\u01dd"+ - "\u01de\u0005r\u0000\u0000\u01de\u01df\u0005e\u0000\u0000\u01df\u01e0\u0001"+ - "\u0000\u0000\u0000\u01e0\u01e1\u0006\u0011\u0000\u0000\u01e1/\u0001\u0000"+ - "\u0000\u0000\u01e2\u01e4\b\u0000\u0000\u0000\u01e3\u01e2\u0001\u0000\u0000"+ - "\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000\u0000"+ - "\u0000\u01e5\u01e6\u0001\u0000\u0000\u0000\u01e6\u01e7\u0001\u0000\u0000"+ - "\u0000\u01e7\u01e8\u0006\u0012\u0000\u0000\u01e81\u0001\u0000\u0000\u0000"+ - "\u01e9\u01ea\u0005/\u0000\u0000\u01ea\u01eb\u0005/\u0000\u0000\u01eb\u01ef"+ - "\u0001\u0000\u0000\u0000\u01ec\u01ee\b\u0001\u0000\u0000\u01ed\u01ec\u0001"+ - 
"\u0000\u0000\u0000\u01ee\u01f1\u0001\u0000\u0000\u0000\u01ef\u01ed\u0001"+ - "\u0000\u0000\u0000\u01ef\u01f0\u0001\u0000\u0000\u0000\u01f0\u01f3\u0001"+ - "\u0000\u0000\u0000\u01f1\u01ef\u0001\u0000\u0000\u0000\u01f2\u01f4\u0005"+ - "\r\u0000\u0000\u01f3\u01f2\u0001\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000"+ - "\u0000\u0000\u01f4\u01f6\u0001\u0000\u0000\u0000\u01f5\u01f7\u0005\n\u0000"+ - "\u0000\u01f6\u01f5\u0001\u0000\u0000\u0000\u01f6\u01f7\u0001\u0000\u0000"+ - "\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006\u0013\t\u0000"+ - "\u01f93\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005/\u0000\u0000\u01fb\u01fc"+ - "\u0005*\u0000\u0000\u01fc\u0201\u0001\u0000\u0000\u0000\u01fd\u0200\u0003"+ - "4\u0014\u0000\u01fe\u0200\t\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000"+ - "\u0000\u0000\u01ff\u01fe\u0001\u0000\u0000\u0000\u0200\u0203\u0001\u0000"+ - "\u0000\u0000\u0201\u0202\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000"+ - "\u0000\u0000\u0202\u0204\u0001\u0000\u0000\u0000\u0203\u0201\u0001\u0000"+ - "\u0000\u0000\u0204\u0205\u0005*\u0000\u0000\u0205\u0206\u0005/\u0000\u0000"+ - "\u0206\u0207\u0001\u0000\u0000\u0000\u0207\u0208\u0006\u0014\t\u0000\u0208"+ - "5\u0001\u0000\u0000\u0000\u0209\u020b\u0007\u0002\u0000\u0000\u020a\u0209"+ - "\u0001\u0000\u0000\u0000\u020b\u020c\u0001\u0000\u0000\u0000\u020c\u020a"+ - "\u0001\u0000\u0000\u0000\u020c\u020d\u0001\u0000\u0000\u0000\u020d\u020e"+ - "\u0001\u0000\u0000\u0000\u020e\u020f\u0006\u0015\t\u0000\u020f7\u0001"+ - "\u0000\u0000\u0000\u0210\u0211\u0003\u00a4L\u0000\u0211\u0212\u0001\u0000"+ - "\u0000\u0000\u0212\u0213\u0006\u0016\n\u0000\u0213\u0214\u0006\u0016\u000b"+ - "\u0000\u02149\u0001\u0000\u0000\u0000\u0215\u0216\u0003B\u001b\u0000\u0216"+ - "\u0217\u0001\u0000\u0000\u0000\u0217\u0218\u0006\u0017\f\u0000\u0218\u0219"+ - "\u0006\u0017\r\u0000\u0219;\u0001\u0000\u0000\u0000\u021a\u021b\u0003"+ - "6\u0015\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c\u021d\u0006\u0018"+ - 
"\t\u0000\u021d=\u0001\u0000\u0000\u0000\u021e\u021f\u00032\u0013\u0000"+ - "\u021f\u0220\u0001\u0000\u0000\u0000\u0220\u0221\u0006\u0019\t\u0000\u0221"+ - "?\u0001\u0000\u0000\u0000\u0222\u0223\u00034\u0014\u0000\u0223\u0224\u0001"+ - "\u0000\u0000\u0000\u0224\u0225\u0006\u001a\t\u0000\u0225A\u0001\u0000"+ - "\u0000\u0000\u0226\u0227\u0005|\u0000\u0000\u0227\u0228\u0001\u0000\u0000"+ - "\u0000\u0228\u0229\u0006\u001b\r\u0000\u0229C\u0001\u0000\u0000\u0000"+ - "\u022a\u022b\u0007\u0003\u0000\u0000\u022bE\u0001\u0000\u0000\u0000\u022c"+ - "\u022d\u0007\u0004\u0000\u0000\u022dG\u0001\u0000\u0000\u0000\u022e\u022f"+ - "\u0005\\\u0000\u0000\u022f\u0230\u0007\u0005\u0000\u0000\u0230I\u0001"+ - "\u0000\u0000\u0000\u0231\u0232\b\u0006\u0000\u0000\u0232K\u0001\u0000"+ - "\u0000\u0000\u0233\u0235\u0007\u0007\u0000\u0000\u0234\u0236\u0007\b\u0000"+ - "\u0000\u0235\u0234\u0001\u0000\u0000\u0000\u0235\u0236\u0001\u0000\u0000"+ - "\u0000\u0236\u0238\u0001\u0000\u0000\u0000\u0237\u0239\u0003D\u001c\u0000"+ - "\u0238\u0237\u0001\u0000\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000"+ - "\u023a\u0238\u0001\u0000\u0000\u0000\u023a\u023b\u0001\u0000\u0000\u0000"+ - "\u023bM\u0001\u0000\u0000\u0000\u023c\u023d\u0005@\u0000\u0000\u023dO"+ - "\u0001\u0000\u0000\u0000\u023e\u023f\u0005`\u0000\u0000\u023fQ\u0001\u0000"+ - "\u0000\u0000\u0240\u0244\b\t\u0000\u0000\u0241\u0242\u0005`\u0000\u0000"+ - "\u0242\u0244\u0005`\u0000\u0000\u0243\u0240\u0001\u0000\u0000\u0000\u0243"+ - "\u0241\u0001\u0000\u0000\u0000\u0244S\u0001\u0000\u0000\u0000\u0245\u0246"+ - "\u0005_\u0000\u0000\u0246U\u0001\u0000\u0000\u0000\u0247\u024b\u0003F"+ - "\u001d\u0000\u0248\u024b\u0003D\u001c\u0000\u0249\u024b\u0003T$\u0000"+ - "\u024a\u0247\u0001\u0000\u0000\u0000\u024a\u0248\u0001\u0000\u0000\u0000"+ - "\u024a\u0249\u0001\u0000\u0000\u0000\u024bW\u0001\u0000\u0000\u0000\u024c"+ - "\u0251\u0005\"\u0000\u0000\u024d\u0250\u0003H\u001e\u0000\u024e\u0250"+ - 
"\u0003J\u001f\u0000\u024f\u024d\u0001\u0000\u0000\u0000\u024f\u024e\u0001"+ - "\u0000\u0000\u0000\u0250\u0253\u0001\u0000\u0000\u0000\u0251\u024f\u0001"+ - "\u0000\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252\u0254\u0001"+ - "\u0000\u0000\u0000\u0253\u0251\u0001\u0000\u0000\u0000\u0254\u026a\u0005"+ - "\"\u0000\u0000\u0255\u0256\u0005\"\u0000\u0000\u0256\u0257\u0005\"\u0000"+ - "\u0000\u0257\u0258\u0005\"\u0000\u0000\u0258\u025c\u0001\u0000\u0000\u0000"+ - "\u0259\u025b\b\u0001\u0000\u0000\u025a\u0259\u0001\u0000\u0000\u0000\u025b"+ - "\u025e\u0001\u0000\u0000\u0000\u025c\u025d\u0001\u0000\u0000\u0000\u025c"+ - "\u025a\u0001\u0000\u0000\u0000\u025d\u025f\u0001\u0000\u0000\u0000\u025e"+ - "\u025c\u0001\u0000\u0000\u0000\u025f\u0260\u0005\"\u0000\u0000\u0260\u0261"+ - "\u0005\"\u0000\u0000\u0261\u0262\u0005\"\u0000\u0000\u0262\u0264\u0001"+ - "\u0000\u0000\u0000\u0263\u0265\u0005\"\u0000\u0000\u0264\u0263\u0001\u0000"+ - "\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u0267\u0001\u0000"+ - "\u0000\u0000\u0266\u0268\u0005\"\u0000\u0000\u0267\u0266\u0001\u0000\u0000"+ - "\u0000\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u026a\u0001\u0000\u0000"+ - "\u0000\u0269\u024c\u0001\u0000\u0000\u0000\u0269\u0255\u0001\u0000\u0000"+ - "\u0000\u026aY\u0001\u0000\u0000\u0000\u026b\u026d\u0003D\u001c\u0000\u026c"+ - "\u026b\u0001\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e"+ - "\u026c\u0001\u0000\u0000\u0000\u026e\u026f\u0001\u0000\u0000\u0000\u026f"+ - "[\u0001\u0000\u0000\u0000\u0270\u0272\u0003D\u001c\u0000\u0271\u0270\u0001"+ - "\u0000\u0000\u0000\u0272\u0273\u0001\u0000\u0000\u0000\u0273\u0271\u0001"+ - "\u0000\u0000\u0000\u0273\u0274\u0001\u0000\u0000\u0000\u0274\u0275\u0001"+ - "\u0000\u0000\u0000\u0275\u0279\u0003l0\u0000\u0276\u0278\u0003D\u001c"+ - "\u0000\u0277\u0276\u0001\u0000\u0000\u0000\u0278\u027b\u0001\u0000\u0000"+ - "\u0000\u0279\u0277\u0001\u0000\u0000\u0000\u0279\u027a\u0001\u0000\u0000"+ - 
"\u0000\u027a\u029b\u0001\u0000\u0000\u0000\u027b\u0279\u0001\u0000\u0000"+ - "\u0000\u027c\u027e\u0003l0\u0000\u027d\u027f\u0003D\u001c\u0000\u027e"+ - "\u027d\u0001\u0000\u0000\u0000\u027f\u0280\u0001\u0000\u0000\u0000\u0280"+ - "\u027e\u0001\u0000\u0000\u0000\u0280\u0281\u0001\u0000\u0000\u0000\u0281"+ - "\u029b\u0001\u0000\u0000\u0000\u0282\u0284\u0003D\u001c\u0000\u0283\u0282"+ - "\u0001\u0000\u0000\u0000\u0284\u0285\u0001\u0000\u0000\u0000\u0285\u0283"+ - "\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000\u0000\u0286\u028e"+ - "\u0001\u0000\u0000\u0000\u0287\u028b\u0003l0\u0000\u0288\u028a\u0003D"+ - "\u001c\u0000\u0289\u0288\u0001\u0000\u0000\u0000\u028a\u028d\u0001\u0000"+ - "\u0000\u0000\u028b\u0289\u0001\u0000\u0000\u0000\u028b\u028c\u0001\u0000"+ - "\u0000\u0000\u028c\u028f\u0001\u0000\u0000\u0000\u028d\u028b\u0001\u0000"+ - "\u0000\u0000\u028e\u0287\u0001\u0000\u0000\u0000\u028e\u028f\u0001\u0000"+ - "\u0000\u0000\u028f\u0290\u0001\u0000\u0000\u0000\u0290\u0291\u0003L \u0000"+ - "\u0291\u029b\u0001\u0000\u0000\u0000\u0292\u0294\u0003l0\u0000\u0293\u0295"+ - "\u0003D\u001c\u0000\u0294\u0293\u0001\u0000\u0000\u0000\u0295\u0296\u0001"+ - "\u0000\u0000\u0000\u0296\u0294\u0001\u0000\u0000\u0000\u0296\u0297\u0001"+ - "\u0000\u0000\u0000\u0297\u0298\u0001\u0000\u0000\u0000\u0298\u0299\u0003"+ - "L \u0000\u0299\u029b\u0001\u0000\u0000\u0000\u029a\u0271\u0001\u0000\u0000"+ - "\u0000\u029a\u027c\u0001\u0000\u0000\u0000\u029a\u0283\u0001\u0000\u0000"+ - "\u0000\u029a\u0292\u0001\u0000\u0000\u0000\u029b]\u0001\u0000\u0000\u0000"+ - "\u029c\u029d\u0005b\u0000\u0000\u029d\u029e\u0005y\u0000\u0000\u029e_"+ - "\u0001\u0000\u0000\u0000\u029f\u02a0\u0005a\u0000\u0000\u02a0\u02a1\u0005"+ - "n\u0000\u0000\u02a1\u02a2\u0005d\u0000\u0000\u02a2a\u0001\u0000\u0000"+ - "\u0000\u02a3\u02a4\u0005a\u0000\u0000\u02a4\u02a5\u0005s\u0000\u0000\u02a5"+ - "\u02a6\u0005c\u0000\u0000\u02a6c\u0001\u0000\u0000\u0000\u02a7\u02a8\u0005"+ - 
"=\u0000\u0000\u02a8e\u0001\u0000\u0000\u0000\u02a9\u02aa\u0005:\u0000"+ - "\u0000\u02aa\u02ab\u0005:\u0000\u0000\u02abg\u0001\u0000\u0000\u0000\u02ac"+ - "\u02ad\u0005,\u0000\u0000\u02adi\u0001\u0000\u0000\u0000\u02ae\u02af\u0005"+ - "d\u0000\u0000\u02af\u02b0\u0005e\u0000\u0000\u02b0\u02b1\u0005s\u0000"+ - "\u0000\u02b1\u02b2\u0005c\u0000\u0000\u02b2k\u0001\u0000\u0000\u0000\u02b3"+ - "\u02b4\u0005.\u0000\u0000\u02b4m\u0001\u0000\u0000\u0000\u02b5\u02b6\u0005"+ - "f\u0000\u0000\u02b6\u02b7\u0005a\u0000\u0000\u02b7\u02b8\u0005l\u0000"+ - "\u0000\u02b8\u02b9\u0005s\u0000\u0000\u02b9\u02ba\u0005e\u0000\u0000\u02ba"+ - "o\u0001\u0000\u0000\u0000\u02bb\u02bc\u0005f\u0000\u0000\u02bc\u02bd\u0005"+ - "i\u0000\u0000\u02bd\u02be\u0005r\u0000\u0000\u02be\u02bf\u0005s\u0000"+ - "\u0000\u02bf\u02c0\u0005t\u0000\u0000\u02c0q\u0001\u0000\u0000\u0000\u02c1"+ - "\u02c2\u0005l\u0000\u0000\u02c2\u02c3\u0005a\u0000\u0000\u02c3\u02c4\u0005"+ - "s\u0000\u0000\u02c4\u02c5\u0005t\u0000\u0000\u02c5s\u0001\u0000\u0000"+ - "\u0000\u02c6\u02c7\u0005(\u0000\u0000\u02c7u\u0001\u0000\u0000\u0000\u02c8"+ - "\u02c9\u0005i\u0000\u0000\u02c9\u02ca\u0005n\u0000\u0000\u02caw\u0001"+ - "\u0000\u0000\u0000\u02cb\u02cc\u0005i\u0000\u0000\u02cc\u02cd\u0005s\u0000"+ - "\u0000\u02cdy\u0001\u0000\u0000\u0000\u02ce\u02cf\u0005l\u0000\u0000\u02cf"+ - "\u02d0\u0005i\u0000\u0000\u02d0\u02d1\u0005k\u0000\u0000\u02d1\u02d2\u0005"+ - "e\u0000\u0000\u02d2{\u0001\u0000\u0000\u0000\u02d3\u02d4\u0005n\u0000"+ - "\u0000\u02d4\u02d5\u0005o\u0000\u0000\u02d5\u02d6\u0005t\u0000\u0000\u02d6"+ - "}\u0001\u0000\u0000\u0000\u02d7\u02d8\u0005n\u0000\u0000\u02d8\u02d9\u0005"+ - "u\u0000\u0000\u02d9\u02da\u0005l\u0000\u0000\u02da\u02db\u0005l\u0000"+ - "\u0000\u02db\u007f\u0001\u0000\u0000\u0000\u02dc\u02dd\u0005n\u0000\u0000"+ - "\u02dd\u02de\u0005u\u0000\u0000\u02de\u02df\u0005l\u0000\u0000\u02df\u02e0"+ - "\u0005l\u0000\u0000\u02e0\u02e1\u0005s\u0000\u0000\u02e1\u0081\u0001\u0000"+ - 
"\u0000\u0000\u02e2\u02e3\u0005o\u0000\u0000\u02e3\u02e4\u0005r\u0000\u0000"+ - "\u02e4\u0083\u0001\u0000\u0000\u0000\u02e5\u02e6\u0005?\u0000\u0000\u02e6"+ - "\u0085\u0001\u0000\u0000\u0000\u02e7\u02e8\u0005r\u0000\u0000\u02e8\u02e9"+ - "\u0005l\u0000\u0000\u02e9\u02ea\u0005i\u0000\u0000\u02ea\u02eb\u0005k"+ - "\u0000\u0000\u02eb\u02ec\u0005e\u0000\u0000\u02ec\u0087\u0001\u0000\u0000"+ - "\u0000\u02ed\u02ee\u0005)\u0000\u0000\u02ee\u0089\u0001\u0000\u0000\u0000"+ - "\u02ef\u02f0\u0005t\u0000\u0000\u02f0\u02f1\u0005r\u0000\u0000\u02f1\u02f2"+ - "\u0005u\u0000\u0000\u02f2\u02f3\u0005e\u0000\u0000\u02f3\u008b\u0001\u0000"+ - "\u0000\u0000\u02f4\u02f5\u0005=\u0000\u0000\u02f5\u02f6\u0005=\u0000\u0000"+ - "\u02f6\u008d\u0001\u0000\u0000\u0000\u02f7\u02f8\u0005=\u0000\u0000\u02f8"+ - "\u02f9\u0005~\u0000\u0000\u02f9\u008f\u0001\u0000\u0000\u0000\u02fa\u02fb"+ - "\u0005!\u0000\u0000\u02fb\u02fc\u0005=\u0000\u0000\u02fc\u0091\u0001\u0000"+ - "\u0000\u0000\u02fd\u02fe\u0005<\u0000\u0000\u02fe\u0093\u0001\u0000\u0000"+ - "\u0000\u02ff\u0300\u0005<\u0000\u0000\u0300\u0301\u0005=\u0000\u0000\u0301"+ - "\u0095\u0001\u0000\u0000\u0000\u0302\u0303\u0005>\u0000\u0000\u0303\u0097"+ - "\u0001\u0000\u0000\u0000\u0304\u0305\u0005>\u0000\u0000\u0305\u0306\u0005"+ - "=\u0000\u0000\u0306\u0099\u0001\u0000\u0000\u0000\u0307\u0308\u0005+\u0000"+ - "\u0000\u0308\u009b\u0001\u0000\u0000\u0000\u0309\u030a\u0005-\u0000\u0000"+ - "\u030a\u009d\u0001\u0000\u0000\u0000\u030b\u030c\u0005*\u0000\u0000\u030c"+ - "\u009f\u0001\u0000\u0000\u0000\u030d\u030e\u0005/\u0000\u0000\u030e\u00a1"+ - "\u0001\u0000\u0000\u0000\u030f\u0310\u0005%\u0000\u0000\u0310\u00a3\u0001"+ - "\u0000\u0000\u0000\u0311\u0312\u0005[\u0000\u0000\u0312\u0313\u0001\u0000"+ - "\u0000\u0000\u0313\u0314\u0006L\u0000\u0000\u0314\u0315\u0006L\u0000\u0000"+ - "\u0315\u00a5\u0001\u0000\u0000\u0000\u0316\u0317\u0005]\u0000\u0000\u0317"+ - "\u0318\u0001\u0000\u0000\u0000\u0318\u0319\u0006M\r\u0000\u0319\u031a"+ - 
"\u0006M\r\u0000\u031a\u00a7\u0001\u0000\u0000\u0000\u031b\u031f\u0003"+ - "F\u001d\u0000\u031c\u031e\u0003V%\u0000\u031d\u031c\u0001\u0000\u0000"+ - "\u0000\u031e\u0321\u0001\u0000\u0000\u0000\u031f\u031d\u0001\u0000\u0000"+ - "\u0000\u031f\u0320\u0001\u0000\u0000\u0000\u0320\u032c\u0001\u0000\u0000"+ - "\u0000\u0321\u031f\u0001\u0000\u0000\u0000\u0322\u0325\u0003T$\u0000\u0323"+ - "\u0325\u0003N!\u0000\u0324\u0322\u0001\u0000\u0000\u0000\u0324\u0323\u0001"+ - "\u0000\u0000\u0000\u0325\u0327\u0001\u0000\u0000\u0000\u0326\u0328\u0003"+ - "V%\u0000\u0327\u0326\u0001\u0000\u0000\u0000\u0328\u0329\u0001\u0000\u0000"+ - "\u0000\u0329\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001\u0000\u0000"+ - "\u0000\u032a\u032c\u0001\u0000\u0000\u0000\u032b\u031b\u0001\u0000\u0000"+ - "\u0000\u032b\u0324\u0001\u0000\u0000\u0000\u032c\u00a9\u0001\u0000\u0000"+ - "\u0000\u032d\u032f\u0003P\"\u0000\u032e\u0330\u0003R#\u0000\u032f\u032e"+ - "\u0001\u0000\u0000\u0000\u0330\u0331\u0001\u0000\u0000\u0000\u0331\u032f"+ - "\u0001\u0000\u0000\u0000\u0331\u0332\u0001\u0000\u0000\u0000\u0332\u0333"+ - "\u0001\u0000\u0000\u0000\u0333\u0334\u0003P\"\u0000\u0334\u00ab\u0001"+ - "\u0000\u0000\u0000\u0335\u0336\u0003\u00aaO\u0000\u0336\u00ad\u0001\u0000"+ - "\u0000\u0000\u0337\u0338\u00032\u0013\u0000\u0338\u0339\u0001\u0000\u0000"+ - "\u0000\u0339\u033a\u0006Q\t\u0000\u033a\u00af\u0001\u0000\u0000\u0000"+ - "\u033b\u033c\u00034\u0014\u0000\u033c\u033d\u0001\u0000\u0000\u0000\u033d"+ - "\u033e\u0006R\t\u0000\u033e\u00b1\u0001\u0000\u0000\u0000\u033f\u0340"+ - "\u00036\u0015\u0000\u0340\u0341\u0001\u0000\u0000\u0000\u0341\u0342\u0006"+ - "S\t\u0000\u0342\u00b3\u0001\u0000\u0000\u0000\u0343\u0344\u0003B\u001b"+ - "\u0000\u0344\u0345\u0001\u0000\u0000\u0000\u0345\u0346\u0006T\f\u0000"+ - "\u0346\u0347\u0006T\r\u0000\u0347\u00b5\u0001\u0000\u0000\u0000\u0348"+ - "\u0349\u0003\u00a4L\u0000\u0349\u034a\u0001\u0000\u0000\u0000\u034a\u034b"+ - 
"\u0006U\n\u0000\u034b\u00b7\u0001\u0000\u0000\u0000\u034c\u034d\u0003"+ - "\u00a6M\u0000\u034d\u034e\u0001\u0000\u0000\u0000\u034e\u034f\u0006V\u000e"+ - "\u0000\u034f\u00b9\u0001\u0000\u0000\u0000\u0350\u0351\u0003h.\u0000\u0351"+ - "\u0352\u0001\u0000\u0000\u0000\u0352\u0353\u0006W\u000f\u0000\u0353\u00bb"+ - "\u0001\u0000\u0000\u0000\u0354\u0355\u0003d,\u0000\u0355\u0356\u0001\u0000"+ - "\u0000\u0000\u0356\u0357\u0006X\u0010\u0000\u0357\u00bd\u0001\u0000\u0000"+ - "\u0000\u0358\u0359\u0003X&\u0000\u0359\u035a\u0001\u0000\u0000\u0000\u035a"+ - "\u035b\u0006Y\u0011\u0000\u035b\u00bf\u0001\u0000\u0000\u0000\u035c\u035d"+ - "\u0005o\u0000\u0000\u035d\u035e\u0005p\u0000\u0000\u035e\u035f\u0005t"+ - "\u0000\u0000\u035f\u0360\u0005i\u0000\u0000\u0360\u0361\u0005o\u0000\u0000"+ - "\u0361\u0362\u0005n\u0000\u0000\u0362\u0363\u0005s\u0000\u0000\u0363\u00c1"+ - "\u0001\u0000\u0000\u0000\u0364\u0365\u0005m\u0000\u0000\u0365\u0366\u0005"+ - "e\u0000\u0000\u0366\u0367\u0005t\u0000\u0000\u0367\u0368\u0005a\u0000"+ - "\u0000\u0368\u0369\u0005d\u0000\u0000\u0369\u036a\u0005a\u0000\u0000\u036a"+ - "\u036b\u0005t\u0000\u0000\u036b\u036c\u0005a\u0000\u0000\u036c\u00c3\u0001"+ - "\u0000\u0000\u0000\u036d\u0371\b\n\u0000\u0000\u036e\u036f\u0005/\u0000"+ - "\u0000\u036f\u0371\b\u000b\u0000\u0000\u0370\u036d\u0001\u0000\u0000\u0000"+ - "\u0370\u036e\u0001\u0000\u0000\u0000\u0371\u00c5\u0001\u0000\u0000\u0000"+ - "\u0372\u0374\u0003\u00c4\\\u0000\u0373\u0372\u0001\u0000\u0000\u0000\u0374"+ - "\u0375\u0001\u0000\u0000\u0000\u0375\u0373\u0001\u0000\u0000\u0000\u0375"+ - "\u0376\u0001\u0000\u0000\u0000\u0376\u00c7\u0001\u0000\u0000\u0000\u0377"+ - "\u0378\u0003\u00acP\u0000\u0378\u0379\u0001\u0000\u0000\u0000\u0379\u037a"+ - "\u0006^\u0012\u0000\u037a\u00c9\u0001\u0000\u0000\u0000\u037b\u037c\u0003"+ - "2\u0013\u0000\u037c\u037d\u0001\u0000\u0000\u0000\u037d\u037e\u0006_\t"+ - "\u0000\u037e\u00cb\u0001\u0000\u0000\u0000\u037f\u0380\u00034\u0014\u0000"+ - 
"\u0380\u0381\u0001\u0000\u0000\u0000\u0381\u0382\u0006`\t\u0000\u0382"+ - "\u00cd\u0001\u0000\u0000\u0000\u0383\u0384\u00036\u0015\u0000\u0384\u0385"+ - "\u0001\u0000\u0000\u0000\u0385\u0386\u0006a\t\u0000\u0386\u00cf\u0001"+ - "\u0000\u0000\u0000\u0387\u0388\u0003B\u001b\u0000\u0388\u0389\u0001\u0000"+ - "\u0000\u0000\u0389\u038a\u0006b\f\u0000\u038a\u038b\u0006b\r\u0000\u038b"+ - "\u00d1\u0001\u0000\u0000\u0000\u038c\u038d\u0003l0\u0000\u038d\u038e\u0001"+ - "\u0000\u0000\u0000\u038e\u038f\u0006c\u0013\u0000\u038f\u00d3\u0001\u0000"+ - "\u0000\u0000\u0390\u0391\u0003h.\u0000\u0391\u0392\u0001\u0000\u0000\u0000"+ - "\u0392\u0393\u0006d\u000f\u0000\u0393\u00d5\u0001\u0000\u0000\u0000\u0394"+ - "\u0399\u0003F\u001d\u0000\u0395\u0399\u0003D\u001c\u0000\u0396\u0399\u0003"+ - "T$\u0000\u0397\u0399\u0003\u009eI\u0000\u0398\u0394\u0001\u0000\u0000"+ - "\u0000\u0398\u0395\u0001\u0000\u0000\u0000\u0398\u0396\u0001\u0000\u0000"+ - "\u0000\u0398\u0397\u0001\u0000\u0000\u0000\u0399\u00d7\u0001\u0000\u0000"+ - "\u0000\u039a\u039d\u0003F\u001d\u0000\u039b\u039d\u0003\u009eI\u0000\u039c"+ - "\u039a\u0001\u0000\u0000\u0000\u039c\u039b\u0001\u0000\u0000\u0000\u039d"+ - "\u03a1\u0001\u0000\u0000\u0000\u039e\u03a0\u0003\u00d6e\u0000\u039f\u039e"+ - "\u0001\u0000\u0000\u0000\u03a0\u03a3\u0001\u0000\u0000\u0000\u03a1\u039f"+ - "\u0001\u0000\u0000\u0000\u03a1\u03a2\u0001\u0000\u0000\u0000\u03a2\u03ae"+ - "\u0001\u0000\u0000\u0000\u03a3\u03a1\u0001\u0000\u0000\u0000\u03a4\u03a7"+ - "\u0003T$\u0000\u03a5\u03a7\u0003N!\u0000\u03a6\u03a4\u0001\u0000\u0000"+ - "\u0000\u03a6\u03a5\u0001\u0000\u0000\u0000\u03a7\u03a9\u0001\u0000\u0000"+ - "\u0000\u03a8\u03aa\u0003\u00d6e\u0000\u03a9\u03a8\u0001\u0000\u0000\u0000"+ - "\u03aa\u03ab\u0001\u0000\u0000\u0000\u03ab\u03a9\u0001\u0000\u0000\u0000"+ - "\u03ab\u03ac\u0001\u0000\u0000\u0000\u03ac\u03ae\u0001\u0000\u0000\u0000"+ - "\u03ad\u039c\u0001\u0000\u0000\u0000\u03ad\u03a6\u0001\u0000\u0000\u0000"+ - 
"\u03ae\u00d9\u0001\u0000\u0000\u0000\u03af\u03b2\u0003\u00d8f\u0000\u03b0"+ - "\u03b2\u0003\u00aaO\u0000\u03b1\u03af\u0001\u0000\u0000\u0000\u03b1\u03b0"+ - "\u0001\u0000\u0000\u0000\u03b2\u03b3\u0001\u0000\u0000\u0000\u03b3\u03b1"+ - "\u0001\u0000\u0000\u0000\u03b3\u03b4\u0001\u0000\u0000\u0000\u03b4\u00db"+ - "\u0001\u0000\u0000\u0000\u03b5\u03b6\u00032\u0013\u0000\u03b6\u03b7\u0001"+ - "\u0000\u0000\u0000\u03b7\u03b8\u0006h\t\u0000\u03b8\u00dd\u0001\u0000"+ - "\u0000\u0000\u03b9\u03ba\u00034\u0014\u0000\u03ba\u03bb\u0001\u0000\u0000"+ - "\u0000\u03bb\u03bc\u0006i\t\u0000\u03bc\u00df\u0001\u0000\u0000\u0000"+ - "\u03bd\u03be\u00036\u0015\u0000\u03be\u03bf\u0001\u0000\u0000\u0000\u03bf"+ - "\u03c0\u0006j\t\u0000\u03c0\u00e1\u0001\u0000\u0000\u0000\u03c1\u03c2"+ - "\u0003B\u001b\u0000\u03c2\u03c3\u0001\u0000\u0000\u0000\u03c3\u03c4\u0006"+ - "k\f\u0000\u03c4\u03c5\u0006k\r\u0000\u03c5\u00e3\u0001\u0000\u0000\u0000"+ - "\u03c6\u03c7\u0003d,\u0000\u03c7\u03c8\u0001\u0000\u0000\u0000\u03c8\u03c9"+ - "\u0006l\u0010\u0000\u03c9\u00e5\u0001\u0000\u0000\u0000\u03ca\u03cb\u0003"+ - "h.\u0000\u03cb\u03cc\u0001\u0000\u0000\u0000\u03cc\u03cd\u0006m\u000f"+ - "\u0000\u03cd\u00e7\u0001\u0000\u0000\u0000\u03ce\u03cf\u0003l0\u0000\u03cf"+ - "\u03d0\u0001\u0000\u0000\u0000\u03d0\u03d1\u0006n\u0013\u0000\u03d1\u00e9"+ - "\u0001\u0000\u0000\u0000\u03d2\u03d3\u0005a\u0000\u0000\u03d3\u03d4\u0005"+ - "s\u0000\u0000\u03d4\u00eb\u0001\u0000\u0000\u0000\u03d5\u03d6\u0003\u00da"+ - "g\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000\u03d7\u03d8\u0006p\u0014\u0000"+ - "\u03d8\u00ed\u0001\u0000\u0000\u0000\u03d9\u03da\u00032\u0013\u0000\u03da"+ - "\u03db\u0001\u0000\u0000\u0000\u03db\u03dc\u0006q\t\u0000\u03dc\u00ef"+ - "\u0001\u0000\u0000\u0000\u03dd\u03de\u00034\u0014\u0000\u03de\u03df\u0001"+ - "\u0000\u0000\u0000\u03df\u03e0\u0006r\t\u0000\u03e0\u00f1\u0001\u0000"+ - "\u0000\u0000\u03e1\u03e2\u00036\u0015\u0000\u03e2\u03e3\u0001\u0000\u0000"+ - 
"\u0000\u03e3\u03e4\u0006s\t\u0000\u03e4\u00f3\u0001\u0000\u0000\u0000"+ - "\u03e5\u03e6\u0003B\u001b\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7"+ - "\u03e8\u0006t\f\u0000\u03e8\u03e9\u0006t\r\u0000\u03e9\u00f5\u0001\u0000"+ - "\u0000\u0000\u03ea\u03eb\u0003\u00a4L\u0000\u03eb\u03ec\u0001\u0000\u0000"+ - "\u0000\u03ec\u03ed\u0006u\n\u0000\u03ed\u03ee\u0006u\u0015\u0000\u03ee"+ - "\u00f7\u0001\u0000\u0000\u0000\u03ef\u03f0\u0005o\u0000\u0000\u03f0\u03f1"+ - "\u0005n\u0000\u0000\u03f1\u03f2\u0001\u0000\u0000\u0000\u03f2\u03f3\u0006"+ - "v\u0016\u0000\u03f3\u00f9\u0001\u0000\u0000\u0000\u03f4\u03f5\u0005w\u0000"+ - "\u0000\u03f5\u03f6\u0005i\u0000\u0000\u03f6\u03f7\u0005t\u0000\u0000\u03f7"+ - "\u03f8\u0005h\u0000\u0000\u03f8\u03f9\u0001\u0000\u0000\u0000\u03f9\u03fa"+ - "\u0006w\u0016\u0000\u03fa\u00fb\u0001\u0000\u0000\u0000\u03fb\u03fc\b"+ - "\f\u0000\u0000\u03fc\u00fd\u0001\u0000\u0000\u0000\u03fd\u03ff\u0003\u00fc"+ - "x\u0000\u03fe\u03fd\u0001\u0000\u0000\u0000\u03ff\u0400\u0001\u0000\u0000"+ - "\u0000\u0400\u03fe\u0001\u0000\u0000\u0000\u0400\u0401\u0001\u0000\u0000"+ - "\u0000\u0401\u0402\u0001\u0000\u0000\u0000\u0402\u0403\u0003\u0142\u009b"+ - "\u0000\u0403\u0405\u0001\u0000\u0000\u0000\u0404\u03fe\u0001\u0000\u0000"+ - "\u0000\u0404\u0405\u0001\u0000\u0000\u0000\u0405\u0407\u0001\u0000\u0000"+ - "\u0000\u0406\u0408\u0003\u00fcx\u0000\u0407\u0406\u0001\u0000\u0000\u0000"+ - "\u0408\u0409\u0001\u0000\u0000\u0000\u0409\u0407\u0001\u0000\u0000\u0000"+ - "\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u00ff\u0001\u0000\u0000\u0000"+ - "\u040b\u040c\u0003\u00acP\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d"+ - "\u040e\u0006z\u0012\u0000\u040e\u0101\u0001\u0000\u0000\u0000\u040f\u0410"+ - "\u0003\u00fey\u0000\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006"+ - "{\u0017\u0000\u0412\u0103\u0001\u0000\u0000\u0000\u0413\u0414\u00032\u0013"+ - "\u0000\u0414\u0415\u0001\u0000\u0000\u0000\u0415\u0416\u0006|\t\u0000"+ - 
"\u0416\u0105\u0001\u0000\u0000\u0000\u0417\u0418\u00034\u0014\u0000\u0418"+ - "\u0419\u0001\u0000\u0000\u0000\u0419\u041a\u0006}\t\u0000\u041a\u0107"+ - "\u0001\u0000\u0000\u0000\u041b\u041c\u00036\u0015\u0000\u041c\u041d\u0001"+ - "\u0000\u0000\u0000\u041d\u041e\u0006~\t\u0000\u041e\u0109\u0001\u0000"+ - "\u0000\u0000\u041f\u0420\u0003B\u001b\u0000\u0420\u0421\u0001\u0000\u0000"+ - "\u0000\u0421\u0422\u0006\u007f\f\u0000\u0422\u0423\u0006\u007f\r\u0000"+ - "\u0423\u0424\u0006\u007f\r\u0000\u0424\u010b\u0001\u0000\u0000\u0000\u0425"+ - "\u0426\u0003d,\u0000\u0426\u0427\u0001\u0000\u0000\u0000\u0427\u0428\u0006"+ - "\u0080\u0010\u0000\u0428\u010d\u0001\u0000\u0000\u0000\u0429\u042a\u0003"+ - "h.\u0000\u042a\u042b\u0001\u0000\u0000\u0000\u042b\u042c\u0006\u0081\u000f"+ - "\u0000\u042c\u010f\u0001\u0000\u0000\u0000\u042d\u042e\u0003l0\u0000\u042e"+ - "\u042f\u0001\u0000\u0000\u0000\u042f\u0430\u0006\u0082\u0013\u0000\u0430"+ - "\u0111\u0001\u0000\u0000\u0000\u0431\u0432\u0003\u00faw\u0000\u0432\u0433"+ - "\u0001\u0000\u0000\u0000\u0433\u0434\u0006\u0083\u0018\u0000\u0434\u0113"+ - "\u0001\u0000\u0000\u0000\u0435\u0436\u0003\u00dag\u0000\u0436\u0437\u0001"+ - "\u0000\u0000\u0000\u0437\u0438\u0006\u0084\u0014\u0000\u0438\u0115\u0001"+ - "\u0000\u0000\u0000\u0439\u043a\u0003\u00acP\u0000\u043a\u043b\u0001\u0000"+ - "\u0000\u0000\u043b\u043c\u0006\u0085\u0012\u0000\u043c\u0117\u0001\u0000"+ - "\u0000\u0000\u043d\u043e\u00032\u0013\u0000\u043e\u043f\u0001\u0000\u0000"+ - "\u0000\u043f\u0440\u0006\u0086\t\u0000\u0440\u0119\u0001\u0000\u0000\u0000"+ - "\u0441\u0442\u00034\u0014\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443"+ - "\u0444\u0006\u0087\t\u0000\u0444\u011b\u0001\u0000\u0000\u0000\u0445\u0446"+ - "\u00036\u0015\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006"+ - "\u0088\t\u0000\u0448\u011d\u0001\u0000\u0000\u0000\u0449\u044a\u0003B"+ - "\u001b\u0000\u044a\u044b\u0001\u0000\u0000\u0000\u044b\u044c\u0006\u0089"+ - 
"\f\u0000\u044c\u044d\u0006\u0089\r\u0000\u044d\u011f\u0001\u0000\u0000"+ - "\u0000\u044e\u044f\u0003l0\u0000\u044f\u0450\u0001\u0000\u0000\u0000\u0450"+ - "\u0451\u0006\u008a\u0013\u0000\u0451\u0121\u0001\u0000\u0000\u0000\u0452"+ - "\u0453\u0003\u00acP\u0000\u0453\u0454\u0001\u0000\u0000\u0000\u0454\u0455"+ - "\u0006\u008b\u0012\u0000\u0455\u0123\u0001\u0000\u0000\u0000\u0456\u0457"+ - "\u0003\u00a8N\u0000\u0457\u0458\u0001\u0000\u0000\u0000\u0458\u0459\u0006"+ - "\u008c\u0019\u0000\u0459\u0125\u0001\u0000\u0000\u0000\u045a\u045b\u0003"+ - "2\u0013\u0000\u045b\u045c\u0001\u0000\u0000\u0000\u045c\u045d\u0006\u008d"+ - "\t\u0000\u045d\u0127\u0001\u0000\u0000\u0000\u045e\u045f\u00034\u0014"+ - "\u0000\u045f\u0460\u0001\u0000\u0000\u0000\u0460\u0461\u0006\u008e\t\u0000"+ - "\u0461\u0129\u0001\u0000\u0000\u0000\u0462\u0463\u00036\u0015\u0000\u0463"+ - "\u0464\u0001\u0000\u0000\u0000\u0464\u0465\u0006\u008f\t\u0000\u0465\u012b"+ - "\u0001\u0000\u0000\u0000\u0466\u0467\u0003B\u001b\u0000\u0467\u0468\u0001"+ - "\u0000\u0000\u0000\u0468\u0469\u0006\u0090\f\u0000\u0469\u046a\u0006\u0090"+ - "\r\u0000\u046a\u012d\u0001\u0000\u0000\u0000\u046b\u046c\u0005i\u0000"+ - "\u0000\u046c\u046d\u0005n\u0000\u0000\u046d\u046e\u0005f\u0000\u0000\u046e"+ - "\u046f\u0005o\u0000\u0000\u046f\u012f\u0001\u0000\u0000\u0000\u0470\u0471"+ - "\u00032\u0013\u0000\u0471\u0472\u0001\u0000\u0000\u0000\u0472\u0473\u0006"+ - "\u0092\t\u0000\u0473\u0131\u0001\u0000\u0000\u0000\u0474\u0475\u00034"+ - "\u0014\u0000\u0475\u0476\u0001\u0000\u0000\u0000\u0476\u0477\u0006\u0093"+ - "\t\u0000\u0477\u0133\u0001\u0000\u0000\u0000\u0478\u0479\u00036\u0015"+ - "\u0000\u0479\u047a\u0001\u0000\u0000\u0000\u047a\u047b\u0006\u0094\t\u0000"+ - "\u047b\u0135\u0001\u0000\u0000\u0000\u047c\u047d\u0003B\u001b\u0000\u047d"+ - "\u047e\u0001\u0000\u0000\u0000\u047e\u047f\u0006\u0095\f\u0000\u047f\u0480"+ - "\u0006\u0095\r\u0000\u0480\u0137\u0001\u0000\u0000\u0000\u0481\u0482\u0005"+ - 
"f\u0000\u0000\u0482\u0483\u0005u\u0000\u0000\u0483\u0484\u0005n\u0000"+ - "\u0000\u0484\u0485\u0005c\u0000\u0000\u0485\u0486\u0005t\u0000\u0000\u0486"+ - "\u0487\u0005i\u0000\u0000\u0487\u0488\u0005o\u0000\u0000\u0488\u0489\u0005"+ - "n\u0000\u0000\u0489\u048a\u0005s\u0000\u0000\u048a\u0139\u0001\u0000\u0000"+ - "\u0000\u048b\u048c\u00032\u0013\u0000\u048c\u048d\u0001\u0000\u0000\u0000"+ - "\u048d\u048e\u0006\u0097\t\u0000\u048e\u013b\u0001\u0000\u0000\u0000\u048f"+ - "\u0490\u00034\u0014\u0000\u0490\u0491\u0001\u0000\u0000\u0000\u0491\u0492"+ - "\u0006\u0098\t\u0000\u0492\u013d\u0001\u0000\u0000\u0000\u0493\u0494\u0003"+ - "6\u0015\u0000\u0494\u0495\u0001\u0000\u0000\u0000\u0495\u0496\u0006\u0099"+ - "\t\u0000\u0496\u013f\u0001\u0000\u0000\u0000\u0497\u0498\u0003\u00a6M"+ - "\u0000\u0498\u0499\u0001\u0000\u0000\u0000\u0499\u049a\u0006\u009a\u000e"+ - "\u0000\u049a\u049b\u0006\u009a\r\u0000\u049b\u0141\u0001\u0000\u0000\u0000"+ - "\u049c\u049d\u0005:\u0000\u0000\u049d\u0143\u0001\u0000\u0000\u0000\u049e"+ - "\u04a4\u0003N!\u0000\u049f\u04a4\u0003D\u001c\u0000\u04a0\u04a4\u0003"+ - "l0\u0000\u04a1\u04a4\u0003F\u001d\u0000\u04a2\u04a4\u0003T$\u0000\u04a3"+ - "\u049e\u0001\u0000\u0000\u0000\u04a3\u049f\u0001\u0000\u0000\u0000\u04a3"+ - "\u04a0\u0001\u0000\u0000\u0000\u04a3\u04a1\u0001\u0000\u0000\u0000\u04a3"+ - "\u04a2\u0001\u0000\u0000\u0000\u04a4\u04a5\u0001\u0000\u0000\u0000\u04a5"+ - "\u04a3\u0001\u0000\u0000\u0000\u04a5\u04a6\u0001\u0000\u0000\u0000\u04a6"+ - "\u0145\u0001\u0000\u0000\u0000\u04a7\u04a8\u00032\u0013\u0000\u04a8\u04a9"+ - "\u0001\u0000\u0000\u0000\u04a9\u04aa\u0006\u009d\t\u0000\u04aa\u0147\u0001"+ - "\u0000\u0000\u0000\u04ab\u04ac\u00034\u0014\u0000\u04ac\u04ad\u0001\u0000"+ - "\u0000\u0000\u04ad\u04ae\u0006\u009e\t\u0000\u04ae\u0149\u0001\u0000\u0000"+ - "\u0000\u04af\u04b0\u00036\u0015\u0000\u04b0\u04b1\u0001\u0000\u0000\u0000"+ - "\u04b1\u04b2\u0006\u009f\t\u0000\u04b2\u014b\u0001\u0000\u0000\u0000:"+ - 
"\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\u01e5\u01ef"+ - "\u01f3\u01f6\u01ff\u0201\u020c\u0235\u023a\u0243\u024a\u024f\u0251\u025c"+ - "\u0264\u0267\u0269\u026e\u0273\u0279\u0280\u0285\u028b\u028e\u0296\u029a"+ - "\u031f\u0324\u0329\u032b\u0331\u0370\u0375\u0398\u039c\u03a1\u03a6\u03ab"+ - "\u03ad\u03b1\u03b3\u0400\u0404\u0409\u04a3\u04a5\u001a\u0005\u0002\u0000"+ - "\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000"+ - "\u0005\n\u0000\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001"+ - "\u0000\u0007A\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000"+ - "\u0007B\u0000\u0007#\u0000\u0007!\u0000\u0007\u001b\u0000\u0007D\u0000"+ - "\u0007%\u0000\u0007N\u0000\u0005\u000b\u0000\u0005\u0007\u0000\u0007X"+ - "\u0000\u0007W\u0000\u0007C\u0000"; + "\u0012\u0004\u0012\u01e2\b\u0012\u000b\u0012\f\u0012\u01e3\u0001\u0012"+ + "\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013"+ + "\u01ec\b\u0013\n\u0013\f\u0013\u01ef\t\u0013\u0001\u0013\u0003\u0013\u01f2"+ + "\b\u0013\u0001\u0013\u0003\u0013\u01f5\b\u0013\u0001\u0013\u0001\u0013"+ + "\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014"+ + "\u01fe\b\u0014\n\u0014\f\u0014\u0201\t\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004\u0015\u0209\b\u0015\u000b"+ + "\u0015\f\u0015\u020a\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001"+ + "\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001"+ + "\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001"+ + "\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0003 \u0234\b \u0001 \u0004"+ + " \u0237\b \u000b \f \u0238\u0001!\u0001!\u0001\"\u0001\"\u0001#\u0001"+ + 
"#\u0001#\u0003#\u0242\b#\u0001$\u0001$\u0001%\u0001%\u0001%\u0003%\u0249"+ + "\b%\u0001&\u0001&\u0001&\u0005&\u024e\b&\n&\f&\u0251\t&\u0001&\u0001&"+ + "\u0001&\u0001&\u0001&\u0001&\u0005&\u0259\b&\n&\f&\u025c\t&\u0001&\u0001"+ + "&\u0001&\u0001&\u0001&\u0003&\u0263\b&\u0001&\u0003&\u0266\b&\u0003&\u0268"+ + "\b&\u0001\'\u0004\'\u026b\b\'\u000b\'\f\'\u026c\u0001(\u0004(\u0270\b"+ + "(\u000b(\f(\u0271\u0001(\u0001(\u0005(\u0276\b(\n(\f(\u0279\t(\u0001("+ + "\u0001(\u0004(\u027d\b(\u000b(\f(\u027e\u0001(\u0004(\u0282\b(\u000b("+ + "\f(\u0283\u0001(\u0001(\u0005(\u0288\b(\n(\f(\u028b\t(\u0003(\u028d\b"+ + "(\u0001(\u0001(\u0001(\u0001(\u0004(\u0293\b(\u000b(\f(\u0294\u0001(\u0001"+ + "(\u0003(\u0299\b(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0001"+ + "+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001-\u0001.\u0001"+ + ".\u0001/\u0001/\u0001/\u0001/\u0001/\u00010\u00010\u00011\u00011\u0001"+ + "1\u00011\u00011\u00011\u00012\u00012\u00012\u00012\u00012\u00012\u0001"+ + "3\u00013\u00013\u00013\u00013\u00014\u00014\u00015\u00015\u00015\u0001"+ + "6\u00016\u00016\u00017\u00017\u00017\u00017\u00017\u00018\u00018\u0001"+ + "8\u00018\u00019\u00019\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001"+ + ":\u0001:\u0001:\u0001;\u0001;\u0001;\u0001<\u0001<\u0001=\u0001=\u0001"+ + "=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001"+ + "?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001A\u0001B\u0001B\u0001B\u0001"+ + "C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001F\u0001"+ + "G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001"+ + "L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001M\u0001M\u0001M\u0001M\u0001"+ + "N\u0001N\u0005N\u031c\bN\nN\fN\u031f\tN\u0001N\u0001N\u0003N\u0323\bN"+ + "\u0001N\u0004N\u0326\bN\u000bN\fN\u0327\u0003N\u032a\bN\u0001O\u0001O"+ + "\u0004O\u032e\bO\u000bO\fO\u032f\u0001O\u0001O\u0001P\u0001P\u0001Q\u0001"+ + "Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001"+ + 
"S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001"+ + "V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001"+ + "X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ + "Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001"+ + "[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0003\\\u036f\b\\\u0001]\u0004"+ + "]\u0372\b]\u000b]\f]\u0373\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001"+ + "_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001"+ + "a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001"+ + "d\u0001d\u0001d\u0003d\u0393\bd\u0001e\u0001e\u0003e\u0397\be\u0001e\u0005"+ + "e\u039a\be\ne\fe\u039d\te\u0001e\u0001e\u0003e\u03a1\be\u0001e\u0004e"+ + "\u03a4\be\u000be\fe\u03a5\u0003e\u03a8\be\u0001f\u0001f\u0004f\u03ac\b"+ + "f\u000bf\ff\u03ad\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001"+ + "h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001j\u0001"+ + "k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001"+ + "m\u0001m\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001"+ + "p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001"+ + "r\u0001s\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001"+ + "t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001"+ + "v\u0001v\u0001v\u0001w\u0001w\u0001x\u0004x\u03f9\bx\u000bx\fx\u03fa\u0001"+ + "x\u0001x\u0003x\u03ff\bx\u0001x\u0004x\u0402\bx\u000bx\fx\u0403\u0001"+ + "y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001"+ + "{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001"+ + "~\u0001~\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f"+ + "\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081"+ + "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082"+ + "\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084"+ + 
"\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085"+ + "\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087"+ + "\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088"+ + "\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089"+ + "\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b"+ + "\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c"+ + "\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e"+ + "\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f"+ + "\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090"+ + "\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092"+ + "\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093"+ + "\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095"+ + "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ + "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096"+ + "\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098"+ + "\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099"+ + "\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b"+ + "\u0001\u009b\u0001\u009b\u0001\u009b\u0004\u009b\u049e\b\u009b\u000b\u009b"+ + "\f\u009b\u049f\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d"+ + "\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e"+ + "\u0001\u009e\u0002\u01ff\u025a\u0000\u009f\f\u0001\u000e\u0002\u0010\u0003"+ + "\u0012\u0004\u0014\u0005\u0016\u0006\u0018\u0007\u001a\b\u001c\t\u001e"+ + "\n \u000b\"\f$\r&\u000e(\u000f*\u0010,\u0011.\u00120\u00132\u00144\u0015"+ + "6\u00168\u0000:\u0000<\u0017>\u0018@\u0019B\u001aD\u0000F\u0000H\u0000"+ + "J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u001bZ\u001c\\\u001d"+ + "^\u001e`\u001fb 
d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u00841\u0086"+ + "2\u00883\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098;\u009a"+ + "<\u009c=\u009e>\u00a0?\u00a2@\u00a4A\u00a6B\u00a8C\u00aa\u0000\u00acD"+ + "\u00aeE\u00b0F\u00b2G\u00b4\u0000\u00b6\u0000\u00b8\u0000\u00ba\u0000"+ + "\u00bc\u0000\u00be\u0000\u00c0H\u00c2I\u00c4\u0000\u00c6J\u00c8K\u00ca"+ + "L\u00ccM\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4\u0000\u00d6\u0000\u00d8"+ + "N\u00daO\u00dcP\u00deQ\u00e0\u0000\u00e2\u0000\u00e4\u0000\u00e6\u0000"+ + "\u00e8R\u00ea\u0000\u00ecS\u00eeT\u00f0U\u00f2\u0000\u00f4\u0000\u00f6"+ + "V\u00f8W\u00fa\u0000\u00fcX\u00fe\u0000\u0100\u0000\u0102Y\u0104Z\u0106"+ + "[\u0108\u0000\u010a\u0000\u010c\u0000\u010e\u0000\u0110\u0000\u0112\u0000"+ + "\u0114\u0000\u0116\\\u0118]\u011a^\u011c\u0000\u011e\u0000\u0120\u0000"+ + "\u0122\u0000\u0124_\u0126`\u0128a\u012a\u0000\u012cb\u012ec\u0130d\u0132"+ + "e\u0134\u0000\u0136f\u0138g\u013ah\u013ci\u013e\u0000\u0140j\u0142k\u0144"+ + "l\u0146m\u0148n\f\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t"+ + "\n\u000b\r\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000"+ + "\t\n\r\r \u0001\u000009\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004"+ + "\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\n"+ + "\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u000b\u0000\t\n\r\r \""+ + "#,,//::<<>?\\\\||\u04c8\u0000\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001"+ + "\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001"+ + "\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001"+ + "\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001"+ + "\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001"+ + "\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000"+ + "\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000"+ + "\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,"+ + 
"\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000"+ + "\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000"+ + "\u00006\u0001\u0000\u0000\u0000\u00018\u0001\u0000\u0000\u0000\u0001:"+ + "\u0001\u0000\u0000\u0000\u0001<\u0001\u0000\u0000\u0000\u0001>\u0001\u0000"+ + "\u0000\u0000\u0001@\u0001\u0000\u0000\u0000\u0002B\u0001\u0000\u0000\u0000"+ + "\u0002X\u0001\u0000\u0000\u0000\u0002Z\u0001\u0000\u0000\u0000\u0002\\"+ + "\u0001\u0000\u0000\u0000\u0002^\u0001\u0000\u0000\u0000\u0002`\u0001\u0000"+ + "\u0000\u0000\u0002b\u0001\u0000\u0000\u0000\u0002d\u0001\u0000\u0000\u0000"+ + "\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000\u0000\u0000\u0002j"+ + "\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002n\u0001\u0000"+ + "\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001\u0000\u0000\u0000"+ + "\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000\u0000\u0000\u0002x"+ + "\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000\u0002|\u0001\u0000"+ + "\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080\u0001\u0000\u0000"+ + "\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084\u0001\u0000\u0000"+ + "\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088\u0001\u0000\u0000"+ + "\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c\u0001\u0000\u0000"+ + "\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090\u0001\u0000\u0000"+ + "\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094\u0001\u0000\u0000"+ + "\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098\u0001\u0000\u0000"+ + "\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c\u0001\u0000\u0000"+ + "\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0\u0001\u0000\u0000"+ + "\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4\u0001\u0000\u0000"+ + "\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00a8\u0001\u0000\u0000"+ + "\u0000\u0002\u00ac\u0001\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000"+ + 
"\u0000\u0002\u00b0\u0001\u0000\u0000\u0000\u0002\u00b2\u0001\u0000\u0000"+ + "\u0000\u0003\u00b4\u0001\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000"+ + "\u0000\u0003\u00b8\u0001\u0000\u0000\u0000\u0003\u00ba\u0001\u0000\u0000"+ + "\u0000\u0003\u00bc\u0001\u0000\u0000\u0000\u0003\u00be\u0001\u0000\u0000"+ + "\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001\u0000\u0000"+ + "\u0000\u0003\u00c6\u0001\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000"+ + "\u0000\u0003\u00ca\u0001\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000"+ + "\u0000\u0004\u00ce\u0001\u0000\u0000\u0000\u0004\u00d0\u0001\u0000\u0000"+ + "\u0000\u0004\u00d2\u0001\u0000\u0000\u0000\u0004\u00d8\u0001\u0000\u0000"+ + "\u0000\u0004\u00da\u0001\u0000\u0000\u0000\u0004\u00dc\u0001\u0000\u0000"+ + "\u0000\u0004\u00de\u0001\u0000\u0000\u0000\u0005\u00e0\u0001\u0000\u0000"+ + "\u0000\u0005\u00e2\u0001\u0000\u0000\u0000\u0005\u00e4\u0001\u0000\u0000"+ + "\u0000\u0005\u00e6\u0001\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000"+ + "\u0000\u0005\u00ea\u0001\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000"+ + "\u0000\u0005\u00ee\u0001\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000"+ + "\u0000\u0006\u00f2\u0001\u0000\u0000\u0000\u0006\u00f4\u0001\u0000\u0000"+ + "\u0000\u0006\u00f6\u0001\u0000\u0000\u0000\u0006\u00f8\u0001\u0000\u0000"+ + "\u0000\u0006\u00fc\u0001\u0000\u0000\u0000\u0006\u00fe\u0001\u0000\u0000"+ + "\u0000\u0006\u0100\u0001\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000"+ + "\u0000\u0006\u0104\u0001\u0000\u0000\u0000\u0006\u0106\u0001\u0000\u0000"+ + "\u0000\u0007\u0108\u0001\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000"+ + "\u0000\u0007\u010c\u0001\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000"+ + "\u0000\u0007\u0110\u0001\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000"+ + "\u0000\u0007\u0114\u0001\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000"+ + "\u0000\u0007\u0118\u0001\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000"+ + 
"\u0000\b\u011c\u0001\u0000\u0000\u0000\b\u011e\u0001\u0000\u0000\u0000"+ + "\b\u0120\u0001\u0000\u0000\u0000\b\u0122\u0001\u0000\u0000\u0000\b\u0124"+ + "\u0001\u0000\u0000\u0000\b\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001"+ + "\u0000\u0000\u0000\t\u012a\u0001\u0000\u0000\u0000\t\u012c\u0001\u0000"+ + "\u0000\u0000\t\u012e\u0001\u0000\u0000\u0000\t\u0130\u0001\u0000\u0000"+ + "\u0000\t\u0132\u0001\u0000\u0000\u0000\n\u0134\u0001\u0000\u0000\u0000"+ + "\n\u0136\u0001\u0000\u0000\u0000\n\u0138\u0001\u0000\u0000\u0000\n\u013a"+ + "\u0001\u0000\u0000\u0000\n\u013c\u0001\u0000\u0000\u0000\u000b\u013e\u0001"+ + "\u0000\u0000\u0000\u000b\u0140\u0001\u0000\u0000\u0000\u000b\u0142\u0001"+ + "\u0000\u0000\u0000\u000b\u0144\u0001\u0000\u0000\u0000\u000b\u0146\u0001"+ + "\u0000\u0000\u0000\u000b\u0148\u0001\u0000\u0000\u0000\f\u014a\u0001\u0000"+ + "\u0000\u0000\u000e\u0154\u0001\u0000\u0000\u0000\u0010\u015b\u0001\u0000"+ + "\u0000\u0000\u0012\u0164\u0001\u0000\u0000\u0000\u0014\u016b\u0001\u0000"+ + "\u0000\u0000\u0016\u0175\u0001\u0000\u0000\u0000\u0018\u017c\u0001\u0000"+ + "\u0000\u0000\u001a\u0183\u0001\u0000\u0000\u0000\u001c\u0191\u0001\u0000"+ + "\u0000\u0000\u001e\u0198\u0001\u0000\u0000\u0000 \u01a0\u0001\u0000\u0000"+ + "\u0000\"\u01a7\u0001\u0000\u0000\u0000$\u01b3\u0001\u0000\u0000\u0000"+ + "&\u01bc\u0001\u0000\u0000\u0000(\u01c2\u0001\u0000\u0000\u0000*\u01c9"+ + "\u0001\u0000\u0000\u0000,\u01d0\u0001\u0000\u0000\u0000.\u01d8\u0001\u0000"+ + "\u0000\u00000\u01e1\u0001\u0000\u0000\u00002\u01e7\u0001\u0000\u0000\u0000"+ + "4\u01f8\u0001\u0000\u0000\u00006\u0208\u0001\u0000\u0000\u00008\u020e"+ + "\u0001\u0000\u0000\u0000:\u0213\u0001\u0000\u0000\u0000<\u0218\u0001\u0000"+ + "\u0000\u0000>\u021c\u0001\u0000\u0000\u0000@\u0220\u0001\u0000\u0000\u0000"+ + "B\u0224\u0001\u0000\u0000\u0000D\u0228\u0001\u0000\u0000\u0000F\u022a"+ + "\u0001\u0000\u0000\u0000H\u022c\u0001\u0000\u0000\u0000J\u022f\u0001\u0000"+ + 
"\u0000\u0000L\u0231\u0001\u0000\u0000\u0000N\u023a\u0001\u0000\u0000\u0000"+ + "P\u023c\u0001\u0000\u0000\u0000R\u0241\u0001\u0000\u0000\u0000T\u0243"+ + "\u0001\u0000\u0000\u0000V\u0248\u0001\u0000\u0000\u0000X\u0267\u0001\u0000"+ + "\u0000\u0000Z\u026a\u0001\u0000\u0000\u0000\\\u0298\u0001\u0000\u0000"+ + "\u0000^\u029a\u0001\u0000\u0000\u0000`\u029d\u0001\u0000\u0000\u0000b"+ + "\u02a1\u0001\u0000\u0000\u0000d\u02a5\u0001\u0000\u0000\u0000f\u02a7\u0001"+ + "\u0000\u0000\u0000h\u02aa\u0001\u0000\u0000\u0000j\u02ac\u0001\u0000\u0000"+ + "\u0000l\u02b1\u0001\u0000\u0000\u0000n\u02b3\u0001\u0000\u0000\u0000p"+ + "\u02b9\u0001\u0000\u0000\u0000r\u02bf\u0001\u0000\u0000\u0000t\u02c4\u0001"+ + "\u0000\u0000\u0000v\u02c6\u0001\u0000\u0000\u0000x\u02c9\u0001\u0000\u0000"+ + "\u0000z\u02cc\u0001\u0000\u0000\u0000|\u02d1\u0001\u0000\u0000\u0000~"+ + "\u02d5\u0001\u0000\u0000\u0000\u0080\u02da\u0001\u0000\u0000\u0000\u0082"+ + "\u02e0\u0001\u0000\u0000\u0000\u0084\u02e3\u0001\u0000\u0000\u0000\u0086"+ + "\u02e5\u0001\u0000\u0000\u0000\u0088\u02eb\u0001\u0000\u0000\u0000\u008a"+ + "\u02ed\u0001\u0000\u0000\u0000\u008c\u02f2\u0001\u0000\u0000\u0000\u008e"+ + "\u02f5\u0001\u0000\u0000\u0000\u0090\u02f8\u0001\u0000\u0000\u0000\u0092"+ + "\u02fb\u0001\u0000\u0000\u0000\u0094\u02fd\u0001\u0000\u0000\u0000\u0096"+ + "\u0300\u0001\u0000\u0000\u0000\u0098\u0302\u0001\u0000\u0000\u0000\u009a"+ + "\u0305\u0001\u0000\u0000\u0000\u009c\u0307\u0001\u0000\u0000\u0000\u009e"+ + "\u0309\u0001\u0000\u0000\u0000\u00a0\u030b\u0001\u0000\u0000\u0000\u00a2"+ + "\u030d\u0001\u0000\u0000\u0000\u00a4\u030f\u0001\u0000\u0000\u0000\u00a6"+ + "\u0314\u0001\u0000\u0000\u0000\u00a8\u0329\u0001\u0000\u0000\u0000\u00aa"+ + "\u032b\u0001\u0000\u0000\u0000\u00ac\u0333\u0001\u0000\u0000\u0000\u00ae"+ + "\u0335\u0001\u0000\u0000\u0000\u00b0\u0339\u0001\u0000\u0000\u0000\u00b2"+ + "\u033d\u0001\u0000\u0000\u0000\u00b4\u0341\u0001\u0000\u0000\u0000\u00b6"+ + 
"\u0346\u0001\u0000\u0000\u0000\u00b8\u034a\u0001\u0000\u0000\u0000\u00ba"+ + "\u034e\u0001\u0000\u0000\u0000\u00bc\u0352\u0001\u0000\u0000\u0000\u00be"+ + "\u0356\u0001\u0000\u0000\u0000\u00c0\u035a\u0001\u0000\u0000\u0000\u00c2"+ + "\u0362\u0001\u0000\u0000\u0000\u00c4\u036e\u0001\u0000\u0000\u0000\u00c6"+ + "\u0371\u0001\u0000\u0000\u0000\u00c8\u0375\u0001\u0000\u0000\u0000\u00ca"+ + "\u0379\u0001\u0000\u0000\u0000\u00cc\u037d\u0001\u0000\u0000\u0000\u00ce"+ + "\u0381\u0001\u0000\u0000\u0000\u00d0\u0386\u0001\u0000\u0000\u0000\u00d2"+ + "\u038a\u0001\u0000\u0000\u0000\u00d4\u0392\u0001\u0000\u0000\u0000\u00d6"+ + "\u03a7\u0001\u0000\u0000\u0000\u00d8\u03ab\u0001\u0000\u0000\u0000\u00da"+ + "\u03af\u0001\u0000\u0000\u0000\u00dc\u03b3\u0001\u0000\u0000\u0000\u00de"+ + "\u03b7\u0001\u0000\u0000\u0000\u00e0\u03bb\u0001\u0000\u0000\u0000\u00e2"+ + "\u03c0\u0001\u0000\u0000\u0000\u00e4\u03c4\u0001\u0000\u0000\u0000\u00e6"+ + "\u03c8\u0001\u0000\u0000\u0000\u00e8\u03cc\u0001\u0000\u0000\u0000\u00ea"+ + "\u03cf\u0001\u0000\u0000\u0000\u00ec\u03d3\u0001\u0000\u0000\u0000\u00ee"+ + "\u03d7\u0001\u0000\u0000\u0000\u00f0\u03db\u0001\u0000\u0000\u0000\u00f2"+ + "\u03df\u0001\u0000\u0000\u0000\u00f4\u03e4\u0001\u0000\u0000\u0000\u00f6"+ + "\u03e9\u0001\u0000\u0000\u0000\u00f8\u03ee\u0001\u0000\u0000\u0000\u00fa"+ + "\u03f5\u0001\u0000\u0000\u0000\u00fc\u03fe\u0001\u0000\u0000\u0000\u00fe"+ + "\u0405\u0001\u0000\u0000\u0000\u0100\u0409\u0001\u0000\u0000\u0000\u0102"+ + "\u040d\u0001\u0000\u0000\u0000\u0104\u0411\u0001\u0000\u0000\u0000\u0106"+ + "\u0415\u0001\u0000\u0000\u0000\u0108\u0419\u0001\u0000\u0000\u0000\u010a"+ + "\u041f\u0001\u0000\u0000\u0000\u010c\u0423\u0001\u0000\u0000\u0000\u010e"+ + "\u0427\u0001\u0000\u0000\u0000\u0110\u042b\u0001\u0000\u0000\u0000\u0112"+ + "\u042f\u0001\u0000\u0000\u0000\u0114\u0433\u0001\u0000\u0000\u0000\u0116"+ + "\u0437\u0001\u0000\u0000\u0000\u0118\u043b\u0001\u0000\u0000\u0000\u011a"+ + 
"\u043f\u0001\u0000\u0000\u0000\u011c\u0443\u0001\u0000\u0000\u0000\u011e"+ + "\u0448\u0001\u0000\u0000\u0000\u0120\u044c\u0001\u0000\u0000\u0000\u0122"+ + "\u0450\u0001\u0000\u0000\u0000\u0124\u0454\u0001\u0000\u0000\u0000\u0126"+ + "\u0458\u0001\u0000\u0000\u0000\u0128\u045c\u0001\u0000\u0000\u0000\u012a"+ + "\u0460\u0001\u0000\u0000\u0000\u012c\u0465\u0001\u0000\u0000\u0000\u012e"+ + "\u046a\u0001\u0000\u0000\u0000\u0130\u046e\u0001\u0000\u0000\u0000\u0132"+ + "\u0472\u0001\u0000\u0000\u0000\u0134\u0476\u0001\u0000\u0000\u0000\u0136"+ + "\u047b\u0001\u0000\u0000\u0000\u0138\u0485\u0001\u0000\u0000\u0000\u013a"+ + "\u0489\u0001\u0000\u0000\u0000\u013c\u048d\u0001\u0000\u0000\u0000\u013e"+ + "\u0491\u0001\u0000\u0000\u0000\u0140\u0496\u0001\u0000\u0000\u0000\u0142"+ + "\u049d\u0001\u0000\u0000\u0000\u0144\u04a1\u0001\u0000\u0000\u0000\u0146"+ + "\u04a5\u0001\u0000\u0000\u0000\u0148\u04a9\u0001\u0000\u0000\u0000\u014a"+ + "\u014b\u0005d\u0000\u0000\u014b\u014c\u0005i\u0000\u0000\u014c\u014d\u0005"+ + "s\u0000\u0000\u014d\u014e\u0005s\u0000\u0000\u014e\u014f\u0005e\u0000"+ + "\u0000\u014f\u0150\u0005c\u0000\u0000\u0150\u0151\u0005t\u0000\u0000\u0151"+ + "\u0152\u0001\u0000\u0000\u0000\u0152\u0153\u0006\u0000\u0000\u0000\u0153"+ + "\r\u0001\u0000\u0000\u0000\u0154\u0155\u0005d\u0000\u0000\u0155\u0156"+ + "\u0005r\u0000\u0000\u0156\u0157\u0005o\u0000\u0000\u0157\u0158\u0005p"+ + "\u0000\u0000\u0158\u0159\u0001\u0000\u0000\u0000\u0159\u015a\u0006\u0001"+ + "\u0001\u0000\u015a\u000f\u0001\u0000\u0000\u0000\u015b\u015c\u0005e\u0000"+ + "\u0000\u015c\u015d\u0005n\u0000\u0000\u015d\u015e\u0005r\u0000\u0000\u015e"+ + "\u015f\u0005i\u0000\u0000\u015f\u0160\u0005c\u0000\u0000\u0160\u0161\u0005"+ + "h\u0000\u0000\u0161\u0162\u0001\u0000\u0000\u0000\u0162\u0163\u0006\u0002"+ + "\u0002\u0000\u0163\u0011\u0001\u0000\u0000\u0000\u0164\u0165\u0005e\u0000"+ + "\u0000\u0165\u0166\u0005v\u0000\u0000\u0166\u0167\u0005a\u0000\u0000\u0167"+ + 
"\u0168\u0005l\u0000\u0000\u0168\u0169\u0001\u0000\u0000\u0000\u0169\u016a"+ + "\u0006\u0003\u0000\u0000\u016a\u0013\u0001\u0000\u0000\u0000\u016b\u016c"+ + "\u0005e\u0000\u0000\u016c\u016d\u0005x\u0000\u0000\u016d\u016e\u0005p"+ + "\u0000\u0000\u016e\u016f\u0005l\u0000\u0000\u016f\u0170\u0005a\u0000\u0000"+ + "\u0170\u0171\u0005i\u0000\u0000\u0171\u0172\u0005n\u0000\u0000\u0172\u0173"+ + "\u0001\u0000\u0000\u0000\u0173\u0174\u0006\u0004\u0003\u0000\u0174\u0015"+ + "\u0001\u0000\u0000\u0000\u0175\u0176\u0005f\u0000\u0000\u0176\u0177\u0005"+ + "r\u0000\u0000\u0177\u0178\u0005o\u0000\u0000\u0178\u0179\u0005m\u0000"+ + "\u0000\u0179\u017a\u0001\u0000\u0000\u0000\u017a\u017b\u0006\u0005\u0004"+ + "\u0000\u017b\u0017\u0001\u0000\u0000\u0000\u017c\u017d\u0005g\u0000\u0000"+ + "\u017d\u017e\u0005r\u0000\u0000\u017e\u017f\u0005o\u0000\u0000\u017f\u0180"+ + "\u0005k\u0000\u0000\u0180\u0181\u0001\u0000\u0000\u0000\u0181\u0182\u0006"+ + "\u0006\u0000\u0000\u0182\u0019\u0001\u0000\u0000\u0000\u0183\u0184\u0005"+ + "i\u0000\u0000\u0184\u0185\u0005n\u0000\u0000\u0185\u0186\u0005l\u0000"+ + "\u0000\u0186\u0187\u0005i\u0000\u0000\u0187\u0188\u0005n\u0000\u0000\u0188"+ + "\u0189\u0005e\u0000\u0000\u0189\u018a\u0005s\u0000\u0000\u018a\u018b\u0005"+ + "t\u0000\u0000\u018b\u018c\u0005a\u0000\u0000\u018c\u018d\u0005t\u0000"+ + "\u0000\u018d\u018e\u0005s\u0000\u0000\u018e\u018f\u0001\u0000\u0000\u0000"+ + "\u018f\u0190\u0006\u0007\u0000\u0000\u0190\u001b\u0001\u0000\u0000\u0000"+ + "\u0191\u0192\u0005k\u0000\u0000\u0192\u0193\u0005e\u0000\u0000\u0193\u0194"+ + "\u0005e\u0000\u0000\u0194\u0195\u0005p\u0000\u0000\u0195\u0196\u0001\u0000"+ + "\u0000\u0000\u0196\u0197\u0006\b\u0001\u0000\u0197\u001d\u0001\u0000\u0000"+ + "\u0000\u0198\u0199\u0005l\u0000\u0000\u0199\u019a\u0005i\u0000\u0000\u019a"+ + "\u019b\u0005m\u0000\u0000\u019b\u019c\u0005i\u0000\u0000\u019c\u019d\u0005"+ + "t\u0000\u0000\u019d\u019e\u0001\u0000\u0000\u0000\u019e\u019f\u0006\t"+ + 
"\u0000\u0000\u019f\u001f\u0001\u0000\u0000\u0000\u01a0\u01a1\u0005m\u0000"+ + "\u0000\u01a1\u01a2\u0005e\u0000\u0000\u01a2\u01a3\u0005t\u0000\u0000\u01a3"+ + "\u01a4\u0005a\u0000\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5\u01a6"+ + "\u0006\n\u0005\u0000\u01a6!\u0001\u0000\u0000\u0000\u01a7\u01a8\u0005"+ + "m\u0000\u0000\u01a8\u01a9\u0005v\u0000\u0000\u01a9\u01aa\u0005_\u0000"+ + "\u0000\u01aa\u01ab\u0005e\u0000\u0000\u01ab\u01ac\u0005x\u0000\u0000\u01ac"+ + "\u01ad\u0005p\u0000\u0000\u01ad\u01ae\u0005a\u0000\u0000\u01ae\u01af\u0005"+ + "n\u0000\u0000\u01af\u01b0\u0005d\u0000\u0000\u01b0\u01b1\u0001\u0000\u0000"+ + "\u0000\u01b1\u01b2\u0006\u000b\u0006\u0000\u01b2#\u0001\u0000\u0000\u0000"+ + "\u01b3\u01b4\u0005r\u0000\u0000\u01b4\u01b5\u0005e\u0000\u0000\u01b5\u01b6"+ + "\u0005n\u0000\u0000\u01b6\u01b7\u0005a\u0000\u0000\u01b7\u01b8\u0005m"+ + "\u0000\u0000\u01b8\u01b9\u0005e\u0000\u0000\u01b9\u01ba\u0001\u0000\u0000"+ + "\u0000\u01ba\u01bb\u0006\f\u0007\u0000\u01bb%\u0001\u0000\u0000\u0000"+ + "\u01bc\u01bd\u0005r\u0000\u0000\u01bd\u01be\u0005o\u0000\u0000\u01be\u01bf"+ + "\u0005w\u0000\u0000\u01bf\u01c0\u0001\u0000\u0000\u0000\u01c0\u01c1\u0006"+ + "\r\u0000\u0000\u01c1\'\u0001\u0000\u0000\u0000\u01c2\u01c3\u0005s\u0000"+ + "\u0000\u01c3\u01c4\u0005h\u0000\u0000\u01c4\u01c5\u0005o\u0000\u0000\u01c5"+ + "\u01c6\u0005w\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7\u01c8"+ + "\u0006\u000e\b\u0000\u01c8)\u0001\u0000\u0000\u0000\u01c9\u01ca\u0005"+ + "s\u0000\u0000\u01ca\u01cb\u0005o\u0000\u0000\u01cb\u01cc\u0005r\u0000"+ + "\u0000\u01cc\u01cd\u0005t\u0000\u0000\u01cd\u01ce\u0001\u0000\u0000\u0000"+ + "\u01ce\u01cf\u0006\u000f\u0000\u0000\u01cf+\u0001\u0000\u0000\u0000\u01d0"+ + "\u01d1\u0005s\u0000\u0000\u01d1\u01d2\u0005t\u0000\u0000\u01d2\u01d3\u0005"+ + "a\u0000\u0000\u01d3\u01d4\u0005t\u0000\u0000\u01d4\u01d5\u0005s\u0000"+ + "\u0000\u01d5\u01d6\u0001\u0000\u0000\u0000\u01d6\u01d7\u0006\u0010\u0000"+ + 
"\u0000\u01d7-\u0001\u0000\u0000\u0000\u01d8\u01d9\u0005w\u0000\u0000\u01d9"+ + "\u01da\u0005h\u0000\u0000\u01da\u01db\u0005e\u0000\u0000\u01db\u01dc\u0005"+ + "r\u0000\u0000\u01dc\u01dd\u0005e\u0000\u0000\u01dd\u01de\u0001\u0000\u0000"+ + "\u0000\u01de\u01df\u0006\u0011\u0000\u0000\u01df/\u0001\u0000\u0000\u0000"+ + "\u01e0\u01e2\b\u0000\u0000\u0000\u01e1\u01e0\u0001\u0000\u0000\u0000\u01e2"+ + "\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e1\u0001\u0000\u0000\u0000\u01e3"+ + "\u01e4\u0001\u0000\u0000\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5"+ + "\u01e6\u0006\u0012\u0000\u0000\u01e61\u0001\u0000\u0000\u0000\u01e7\u01e8"+ + "\u0005/\u0000\u0000\u01e8\u01e9\u0005/\u0000\u0000\u01e9\u01ed\u0001\u0000"+ + "\u0000\u0000\u01ea\u01ec\b\u0001\u0000\u0000\u01eb\u01ea\u0001\u0000\u0000"+ + "\u0000\u01ec\u01ef\u0001\u0000\u0000\u0000\u01ed\u01eb\u0001\u0000\u0000"+ + "\u0000\u01ed\u01ee\u0001\u0000\u0000\u0000\u01ee\u01f1\u0001\u0000\u0000"+ + "\u0000\u01ef\u01ed\u0001\u0000\u0000\u0000\u01f0\u01f2\u0005\r\u0000\u0000"+ + "\u01f1\u01f0\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000"+ + "\u01f2\u01f4\u0001\u0000\u0000\u0000\u01f3\u01f5\u0005\n\u0000\u0000\u01f4"+ + "\u01f3\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000\u01f5"+ + "\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0006\u0013\t\u0000\u01f73"+ + "\u0001\u0000\u0000\u0000\u01f8\u01f9\u0005/\u0000\u0000\u01f9\u01fa\u0005"+ + "*\u0000\u0000\u01fa\u01ff\u0001\u0000\u0000\u0000\u01fb\u01fe\u00034\u0014"+ + "\u0000\u01fc\u01fe\t\u0000\u0000\u0000\u01fd\u01fb\u0001\u0000\u0000\u0000"+ + "\u01fd\u01fc\u0001\u0000\u0000\u0000\u01fe\u0201\u0001\u0000\u0000\u0000"+ + "\u01ff\u0200\u0001\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000\u0000\u0000"+ + "\u0200\u0202\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000\u0000\u0000"+ + "\u0202\u0203\u0005*\u0000\u0000\u0203\u0204\u0005/\u0000\u0000\u0204\u0205"+ + "\u0001\u0000\u0000\u0000\u0205\u0206\u0006\u0014\t\u0000\u02065\u0001"+ + 
"\u0000\u0000\u0000\u0207\u0209\u0007\u0002\u0000\u0000\u0208\u0207\u0001"+ + "\u0000\u0000\u0000\u0209\u020a\u0001\u0000\u0000\u0000\u020a\u0208\u0001"+ + "\u0000\u0000\u0000\u020a\u020b\u0001\u0000\u0000\u0000\u020b\u020c\u0001"+ + "\u0000\u0000\u0000\u020c\u020d\u0006\u0015\t\u0000\u020d7\u0001\u0000"+ + "\u0000\u0000\u020e\u020f\u0003\u00a4L\u0000\u020f\u0210\u0001\u0000\u0000"+ + "\u0000\u0210\u0211\u0006\u0016\n\u0000\u0211\u0212\u0006\u0016\u000b\u0000"+ + "\u02129\u0001\u0000\u0000\u0000\u0213\u0214\u0003B\u001b\u0000\u0214\u0215"+ + "\u0001\u0000\u0000\u0000\u0215\u0216\u0006\u0017\f\u0000\u0216\u0217\u0006"+ + "\u0017\r\u0000\u0217;\u0001\u0000\u0000\u0000\u0218\u0219\u00036\u0015"+ + "\u0000\u0219\u021a\u0001\u0000\u0000\u0000\u021a\u021b\u0006\u0018\t\u0000"+ + "\u021b=\u0001\u0000\u0000\u0000\u021c\u021d\u00032\u0013\u0000\u021d\u021e"+ + "\u0001\u0000\u0000\u0000\u021e\u021f\u0006\u0019\t\u0000\u021f?\u0001"+ + "\u0000\u0000\u0000\u0220\u0221\u00034\u0014\u0000\u0221\u0222\u0001\u0000"+ + "\u0000\u0000\u0222\u0223\u0006\u001a\t\u0000\u0223A\u0001\u0000\u0000"+ + "\u0000\u0224\u0225\u0005|\u0000\u0000\u0225\u0226\u0001\u0000\u0000\u0000"+ + "\u0226\u0227\u0006\u001b\r\u0000\u0227C\u0001\u0000\u0000\u0000\u0228"+ + "\u0229\u0007\u0003\u0000\u0000\u0229E\u0001\u0000\u0000\u0000\u022a\u022b"+ + "\u0007\u0004\u0000\u0000\u022bG\u0001\u0000\u0000\u0000\u022c\u022d\u0005"+ + "\\\u0000\u0000\u022d\u022e\u0007\u0005\u0000\u0000\u022eI\u0001\u0000"+ + "\u0000\u0000\u022f\u0230\b\u0006\u0000\u0000\u0230K\u0001\u0000\u0000"+ + "\u0000\u0231\u0233\u0007\u0007\u0000\u0000\u0232\u0234\u0007\b\u0000\u0000"+ + "\u0233\u0232\u0001\u0000\u0000\u0000\u0233\u0234\u0001\u0000\u0000\u0000"+ + "\u0234\u0236\u0001\u0000\u0000\u0000\u0235\u0237\u0003D\u001c\u0000\u0236"+ + "\u0235\u0001\u0000\u0000\u0000\u0237\u0238\u0001\u0000\u0000\u0000\u0238"+ + "\u0236\u0001\u0000\u0000\u0000\u0238\u0239\u0001\u0000\u0000\u0000\u0239"+ + 
"M\u0001\u0000\u0000\u0000\u023a\u023b\u0005@\u0000\u0000\u023bO\u0001"+ + "\u0000\u0000\u0000\u023c\u023d\u0005`\u0000\u0000\u023dQ\u0001\u0000\u0000"+ + "\u0000\u023e\u0242\b\t\u0000\u0000\u023f\u0240\u0005`\u0000\u0000\u0240"+ + "\u0242\u0005`\u0000\u0000\u0241\u023e\u0001\u0000\u0000\u0000\u0241\u023f"+ + "\u0001\u0000\u0000\u0000\u0242S\u0001\u0000\u0000\u0000\u0243\u0244\u0005"+ + "_\u0000\u0000\u0244U\u0001\u0000\u0000\u0000\u0245\u0249\u0003F\u001d"+ + "\u0000\u0246\u0249\u0003D\u001c\u0000\u0247\u0249\u0003T$\u0000\u0248"+ + "\u0245\u0001\u0000\u0000\u0000\u0248\u0246\u0001\u0000\u0000\u0000\u0248"+ + "\u0247\u0001\u0000\u0000\u0000\u0249W\u0001\u0000\u0000\u0000\u024a\u024f"+ + "\u0005\"\u0000\u0000\u024b\u024e\u0003H\u001e\u0000\u024c\u024e\u0003"+ + "J\u001f\u0000\u024d\u024b\u0001\u0000\u0000\u0000\u024d\u024c\u0001\u0000"+ + "\u0000\u0000\u024e\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001\u0000"+ + "\u0000\u0000\u024f\u0250\u0001\u0000\u0000\u0000\u0250\u0252\u0001\u0000"+ + "\u0000\u0000\u0251\u024f\u0001\u0000\u0000\u0000\u0252\u0268\u0005\"\u0000"+ + "\u0000\u0253\u0254\u0005\"\u0000\u0000\u0254\u0255\u0005\"\u0000\u0000"+ + "\u0255\u0256\u0005\"\u0000\u0000\u0256\u025a\u0001\u0000\u0000\u0000\u0257"+ + "\u0259\b\u0001\u0000\u0000\u0258\u0257\u0001\u0000\u0000\u0000\u0259\u025c"+ + "\u0001\u0000\u0000\u0000\u025a\u025b\u0001\u0000\u0000\u0000\u025a\u0258"+ + "\u0001\u0000\u0000\u0000\u025b\u025d\u0001\u0000\u0000\u0000\u025c\u025a"+ + "\u0001\u0000\u0000\u0000\u025d\u025e\u0005\"\u0000\u0000\u025e\u025f\u0005"+ + "\"\u0000\u0000\u025f\u0260\u0005\"\u0000\u0000\u0260\u0262\u0001\u0000"+ + "\u0000\u0000\u0261\u0263\u0005\"\u0000\u0000\u0262\u0261\u0001\u0000\u0000"+ + "\u0000\u0262\u0263\u0001\u0000\u0000\u0000\u0263\u0265\u0001\u0000\u0000"+ + "\u0000\u0264\u0266\u0005\"\u0000\u0000\u0265\u0264\u0001\u0000\u0000\u0000"+ + "\u0265\u0266\u0001\u0000\u0000\u0000\u0266\u0268\u0001\u0000\u0000\u0000"+ + 
"\u0267\u024a\u0001\u0000\u0000\u0000\u0267\u0253\u0001\u0000\u0000\u0000"+ + "\u0268Y\u0001\u0000\u0000\u0000\u0269\u026b\u0003D\u001c\u0000\u026a\u0269"+ + "\u0001\u0000\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026a"+ + "\u0001\u0000\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d[\u0001"+ + "\u0000\u0000\u0000\u026e\u0270\u0003D\u001c\u0000\u026f\u026e\u0001\u0000"+ + "\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000\u0271\u026f\u0001\u0000"+ + "\u0000\u0000\u0271\u0272\u0001\u0000\u0000\u0000\u0272\u0273\u0001\u0000"+ + "\u0000\u0000\u0273\u0277\u0003l0\u0000\u0274\u0276\u0003D\u001c\u0000"+ + "\u0275\u0274\u0001\u0000\u0000\u0000\u0276\u0279\u0001\u0000\u0000\u0000"+ + "\u0277\u0275\u0001\u0000\u0000\u0000\u0277\u0278\u0001\u0000\u0000\u0000"+ + "\u0278\u0299\u0001\u0000\u0000\u0000\u0279\u0277\u0001\u0000\u0000\u0000"+ + "\u027a\u027c\u0003l0\u0000\u027b\u027d\u0003D\u001c\u0000\u027c\u027b"+ + "\u0001\u0000\u0000\u0000\u027d\u027e\u0001\u0000\u0000\u0000\u027e\u027c"+ + "\u0001\u0000\u0000\u0000\u027e\u027f\u0001\u0000\u0000\u0000\u027f\u0299"+ + "\u0001\u0000\u0000\u0000\u0280\u0282\u0003D\u001c\u0000\u0281\u0280\u0001"+ + "\u0000\u0000\u0000\u0282\u0283\u0001\u0000\u0000\u0000\u0283\u0281\u0001"+ + "\u0000\u0000\u0000\u0283\u0284\u0001\u0000\u0000\u0000\u0284\u028c\u0001"+ + "\u0000\u0000\u0000\u0285\u0289\u0003l0\u0000\u0286\u0288\u0003D\u001c"+ + "\u0000\u0287\u0286\u0001\u0000\u0000\u0000\u0288\u028b\u0001\u0000\u0000"+ + "\u0000\u0289\u0287\u0001\u0000\u0000\u0000\u0289\u028a\u0001\u0000\u0000"+ + "\u0000\u028a\u028d\u0001\u0000\u0000\u0000\u028b\u0289\u0001\u0000\u0000"+ + "\u0000\u028c\u0285\u0001\u0000\u0000\u0000\u028c\u028d\u0001\u0000\u0000"+ + "\u0000\u028d\u028e\u0001\u0000\u0000\u0000\u028e\u028f\u0003L \u0000\u028f"+ + "\u0299\u0001\u0000\u0000\u0000\u0290\u0292\u0003l0\u0000\u0291\u0293\u0003"+ + "D\u001c\u0000\u0292\u0291\u0001\u0000\u0000\u0000\u0293\u0294\u0001\u0000"+ + 
"\u0000\u0000\u0294\u0292\u0001\u0000\u0000\u0000\u0294\u0295\u0001\u0000"+ + "\u0000\u0000\u0295\u0296\u0001\u0000\u0000\u0000\u0296\u0297\u0003L \u0000"+ + "\u0297\u0299\u0001\u0000\u0000\u0000\u0298\u026f\u0001\u0000\u0000\u0000"+ + "\u0298\u027a\u0001\u0000\u0000\u0000\u0298\u0281\u0001\u0000\u0000\u0000"+ + "\u0298\u0290\u0001\u0000\u0000\u0000\u0299]\u0001\u0000\u0000\u0000\u029a"+ + "\u029b\u0005b\u0000\u0000\u029b\u029c\u0005y\u0000\u0000\u029c_\u0001"+ + "\u0000\u0000\u0000\u029d\u029e\u0005a\u0000\u0000\u029e\u029f\u0005n\u0000"+ + "\u0000\u029f\u02a0\u0005d\u0000\u0000\u02a0a\u0001\u0000\u0000\u0000\u02a1"+ + "\u02a2\u0005a\u0000\u0000\u02a2\u02a3\u0005s\u0000\u0000\u02a3\u02a4\u0005"+ + "c\u0000\u0000\u02a4c\u0001\u0000\u0000\u0000\u02a5\u02a6\u0005=\u0000"+ + "\u0000\u02a6e\u0001\u0000\u0000\u0000\u02a7\u02a8\u0005:\u0000\u0000\u02a8"+ + "\u02a9\u0005:\u0000\u0000\u02a9g\u0001\u0000\u0000\u0000\u02aa\u02ab\u0005"+ + ",\u0000\u0000\u02abi\u0001\u0000\u0000\u0000\u02ac\u02ad\u0005d\u0000"+ + "\u0000\u02ad\u02ae\u0005e\u0000\u0000\u02ae\u02af\u0005s\u0000\u0000\u02af"+ + "\u02b0\u0005c\u0000\u0000\u02b0k\u0001\u0000\u0000\u0000\u02b1\u02b2\u0005"+ + ".\u0000\u0000\u02b2m\u0001\u0000\u0000\u0000\u02b3\u02b4\u0005f\u0000"+ + "\u0000\u02b4\u02b5\u0005a\u0000\u0000\u02b5\u02b6\u0005l\u0000\u0000\u02b6"+ + "\u02b7\u0005s\u0000\u0000\u02b7\u02b8\u0005e\u0000\u0000\u02b8o\u0001"+ + "\u0000\u0000\u0000\u02b9\u02ba\u0005f\u0000\u0000\u02ba\u02bb\u0005i\u0000"+ + "\u0000\u02bb\u02bc\u0005r\u0000\u0000\u02bc\u02bd\u0005s\u0000\u0000\u02bd"+ + "\u02be\u0005t\u0000\u0000\u02beq\u0001\u0000\u0000\u0000\u02bf\u02c0\u0005"+ + "l\u0000\u0000\u02c0\u02c1\u0005a\u0000\u0000\u02c1\u02c2\u0005s\u0000"+ + "\u0000\u02c2\u02c3\u0005t\u0000\u0000\u02c3s\u0001\u0000\u0000\u0000\u02c4"+ + "\u02c5\u0005(\u0000\u0000\u02c5u\u0001\u0000\u0000\u0000\u02c6\u02c7\u0005"+ + "i\u0000\u0000\u02c7\u02c8\u0005n\u0000\u0000\u02c8w\u0001\u0000\u0000"+ + 
"\u0000\u02c9\u02ca\u0005i\u0000\u0000\u02ca\u02cb\u0005s\u0000\u0000\u02cb"+ + "y\u0001\u0000\u0000\u0000\u02cc\u02cd\u0005l\u0000\u0000\u02cd\u02ce\u0005"+ + "i\u0000\u0000\u02ce\u02cf\u0005k\u0000\u0000\u02cf\u02d0\u0005e\u0000"+ + "\u0000\u02d0{\u0001\u0000\u0000\u0000\u02d1\u02d2\u0005n\u0000\u0000\u02d2"+ + "\u02d3\u0005o\u0000\u0000\u02d3\u02d4\u0005t\u0000\u0000\u02d4}\u0001"+ + "\u0000\u0000\u0000\u02d5\u02d6\u0005n\u0000\u0000\u02d6\u02d7\u0005u\u0000"+ + "\u0000\u02d7\u02d8\u0005l\u0000\u0000\u02d8\u02d9\u0005l\u0000\u0000\u02d9"+ + "\u007f\u0001\u0000\u0000\u0000\u02da\u02db\u0005n\u0000\u0000\u02db\u02dc"+ + "\u0005u\u0000\u0000\u02dc\u02dd\u0005l\u0000\u0000\u02dd\u02de\u0005l"+ + "\u0000\u0000\u02de\u02df\u0005s\u0000\u0000\u02df\u0081\u0001\u0000\u0000"+ + "\u0000\u02e0\u02e1\u0005o\u0000\u0000\u02e1\u02e2\u0005r\u0000\u0000\u02e2"+ + "\u0083\u0001\u0000\u0000\u0000\u02e3\u02e4\u0005?\u0000\u0000\u02e4\u0085"+ + "\u0001\u0000\u0000\u0000\u02e5\u02e6\u0005r\u0000\u0000\u02e6\u02e7\u0005"+ + "l\u0000\u0000\u02e7\u02e8\u0005i\u0000\u0000\u02e8\u02e9\u0005k\u0000"+ + "\u0000\u02e9\u02ea\u0005e\u0000\u0000\u02ea\u0087\u0001\u0000\u0000\u0000"+ + "\u02eb\u02ec\u0005)\u0000\u0000\u02ec\u0089\u0001\u0000\u0000\u0000\u02ed"+ + "\u02ee\u0005t\u0000\u0000\u02ee\u02ef\u0005r\u0000\u0000\u02ef\u02f0\u0005"+ + "u\u0000\u0000\u02f0\u02f1\u0005e\u0000\u0000\u02f1\u008b\u0001\u0000\u0000"+ + "\u0000\u02f2\u02f3\u0005=\u0000\u0000\u02f3\u02f4\u0005=\u0000\u0000\u02f4"+ + "\u008d\u0001\u0000\u0000\u0000\u02f5\u02f6\u0005=\u0000\u0000\u02f6\u02f7"+ + "\u0005~\u0000\u0000\u02f7\u008f\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005"+ + "!\u0000\u0000\u02f9\u02fa\u0005=\u0000\u0000\u02fa\u0091\u0001\u0000\u0000"+ + "\u0000\u02fb\u02fc\u0005<\u0000\u0000\u02fc\u0093\u0001\u0000\u0000\u0000"+ + "\u02fd\u02fe\u0005<\u0000\u0000\u02fe\u02ff\u0005=\u0000\u0000\u02ff\u0095"+ + "\u0001\u0000\u0000\u0000\u0300\u0301\u0005>\u0000\u0000\u0301\u0097\u0001"+ + 
"\u0000\u0000\u0000\u0302\u0303\u0005>\u0000\u0000\u0303\u0304\u0005=\u0000"+ + "\u0000\u0304\u0099\u0001\u0000\u0000\u0000\u0305\u0306\u0005+\u0000\u0000"+ + "\u0306\u009b\u0001\u0000\u0000\u0000\u0307\u0308\u0005-\u0000\u0000\u0308"+ + "\u009d\u0001\u0000\u0000\u0000\u0309\u030a\u0005*\u0000\u0000\u030a\u009f"+ + "\u0001\u0000\u0000\u0000\u030b\u030c\u0005/\u0000\u0000\u030c\u00a1\u0001"+ + "\u0000\u0000\u0000\u030d\u030e\u0005%\u0000\u0000\u030e\u00a3\u0001\u0000"+ + "\u0000\u0000\u030f\u0310\u0005[\u0000\u0000\u0310\u0311\u0001\u0000\u0000"+ + "\u0000\u0311\u0312\u0006L\u0000\u0000\u0312\u0313\u0006L\u0000\u0000\u0313"+ + "\u00a5\u0001\u0000\u0000\u0000\u0314\u0315\u0005]\u0000\u0000\u0315\u0316"+ + "\u0001\u0000\u0000\u0000\u0316\u0317\u0006M\r\u0000\u0317\u0318\u0006"+ + "M\r\u0000\u0318\u00a7\u0001\u0000\u0000\u0000\u0319\u031d\u0003F\u001d"+ + "\u0000\u031a\u031c\u0003V%\u0000\u031b\u031a\u0001\u0000\u0000\u0000\u031c"+ + "\u031f\u0001\u0000\u0000\u0000\u031d\u031b\u0001\u0000\u0000\u0000\u031d"+ + "\u031e\u0001\u0000\u0000\u0000\u031e\u032a\u0001\u0000\u0000\u0000\u031f"+ + "\u031d\u0001\u0000\u0000\u0000\u0320\u0323\u0003T$\u0000\u0321\u0323\u0003"+ + "N!\u0000\u0322\u0320\u0001\u0000\u0000\u0000\u0322\u0321\u0001\u0000\u0000"+ + "\u0000\u0323\u0325\u0001\u0000\u0000\u0000\u0324\u0326\u0003V%\u0000\u0325"+ + "\u0324\u0001\u0000\u0000\u0000\u0326\u0327\u0001\u0000\u0000\u0000\u0327"+ + "\u0325\u0001\u0000\u0000\u0000\u0327\u0328\u0001\u0000\u0000\u0000\u0328"+ + "\u032a\u0001\u0000\u0000\u0000\u0329\u0319\u0001\u0000\u0000\u0000\u0329"+ + "\u0322\u0001\u0000\u0000\u0000\u032a\u00a9\u0001\u0000\u0000\u0000\u032b"+ + "\u032d\u0003P\"\u0000\u032c\u032e\u0003R#\u0000\u032d\u032c\u0001\u0000"+ + "\u0000\u0000\u032e\u032f\u0001\u0000\u0000\u0000\u032f\u032d\u0001\u0000"+ + "\u0000\u0000\u032f\u0330\u0001\u0000\u0000\u0000\u0330\u0331\u0001\u0000"+ + "\u0000\u0000\u0331\u0332\u0003P\"\u0000\u0332\u00ab\u0001\u0000\u0000"+ + 
"\u0000\u0333\u0334\u0003\u00aaO\u0000\u0334\u00ad\u0001\u0000\u0000\u0000"+ + "\u0335\u0336\u00032\u0013\u0000\u0336\u0337\u0001\u0000\u0000\u0000\u0337"+ + "\u0338\u0006Q\t\u0000\u0338\u00af\u0001\u0000\u0000\u0000\u0339\u033a"+ + "\u00034\u0014\u0000\u033a\u033b\u0001\u0000\u0000\u0000\u033b\u033c\u0006"+ + "R\t\u0000\u033c\u00b1\u0001\u0000\u0000\u0000\u033d\u033e\u00036\u0015"+ + "\u0000\u033e\u033f\u0001\u0000\u0000\u0000\u033f\u0340\u0006S\t\u0000"+ + "\u0340\u00b3\u0001\u0000\u0000\u0000\u0341\u0342\u0003B\u001b\u0000\u0342"+ + "\u0343\u0001\u0000\u0000\u0000\u0343\u0344\u0006T\f\u0000\u0344\u0345"+ + "\u0006T\r\u0000\u0345\u00b5\u0001\u0000\u0000\u0000\u0346\u0347\u0003"+ + "\u00a4L\u0000\u0347\u0348\u0001\u0000\u0000\u0000\u0348\u0349\u0006U\n"+ + "\u0000\u0349\u00b7\u0001\u0000\u0000\u0000\u034a\u034b\u0003\u00a6M\u0000"+ + "\u034b\u034c\u0001\u0000\u0000\u0000\u034c\u034d\u0006V\u000e\u0000\u034d"+ + "\u00b9\u0001\u0000\u0000\u0000\u034e\u034f\u0003h.\u0000\u034f\u0350\u0001"+ + "\u0000\u0000\u0000\u0350\u0351\u0006W\u000f\u0000\u0351\u00bb\u0001\u0000"+ + "\u0000\u0000\u0352\u0353\u0003d,\u0000\u0353\u0354\u0001\u0000\u0000\u0000"+ + "\u0354\u0355\u0006X\u0010\u0000\u0355\u00bd\u0001\u0000\u0000\u0000\u0356"+ + "\u0357\u0003X&\u0000\u0357\u0358\u0001\u0000\u0000\u0000\u0358\u0359\u0006"+ + "Y\u0011\u0000\u0359\u00bf\u0001\u0000\u0000\u0000\u035a\u035b\u0005o\u0000"+ + "\u0000\u035b\u035c\u0005p\u0000\u0000\u035c\u035d\u0005t\u0000\u0000\u035d"+ + "\u035e\u0005i\u0000\u0000\u035e\u035f\u0005o\u0000\u0000\u035f\u0360\u0005"+ + "n\u0000\u0000\u0360\u0361\u0005s\u0000\u0000\u0361\u00c1\u0001\u0000\u0000"+ + "\u0000\u0362\u0363\u0005m\u0000\u0000\u0363\u0364\u0005e\u0000\u0000\u0364"+ + "\u0365\u0005t\u0000\u0000\u0365\u0366\u0005a\u0000\u0000\u0366\u0367\u0005"+ + "d\u0000\u0000\u0367\u0368\u0005a\u0000\u0000\u0368\u0369\u0005t\u0000"+ + "\u0000\u0369\u036a\u0005a\u0000\u0000\u036a\u00c3\u0001\u0000\u0000\u0000"+ + 
"\u036b\u036f\b\n\u0000\u0000\u036c\u036d\u0005/\u0000\u0000\u036d\u036f"+ + "\b\u000b\u0000\u0000\u036e\u036b\u0001\u0000\u0000\u0000\u036e\u036c\u0001"+ + "\u0000\u0000\u0000\u036f\u00c5\u0001\u0000\u0000\u0000\u0370\u0372\u0003"+ + "\u00c4\\\u0000\u0371\u0370\u0001\u0000\u0000\u0000\u0372\u0373\u0001\u0000"+ + "\u0000\u0000\u0373\u0371\u0001\u0000\u0000\u0000\u0373\u0374\u0001\u0000"+ + "\u0000\u0000\u0374\u00c7\u0001\u0000\u0000\u0000\u0375\u0376\u00032\u0013"+ + "\u0000\u0376\u0377\u0001\u0000\u0000\u0000\u0377\u0378\u0006^\t\u0000"+ + "\u0378\u00c9\u0001\u0000\u0000\u0000\u0379\u037a\u00034\u0014\u0000\u037a"+ + "\u037b\u0001\u0000\u0000\u0000\u037b\u037c\u0006_\t\u0000\u037c\u00cb"+ + "\u0001\u0000\u0000\u0000\u037d\u037e\u00036\u0015\u0000\u037e\u037f\u0001"+ + "\u0000\u0000\u0000\u037f\u0380\u0006`\t\u0000\u0380\u00cd\u0001\u0000"+ + "\u0000\u0000\u0381\u0382\u0003B\u001b\u0000\u0382\u0383\u0001\u0000\u0000"+ + "\u0000\u0383\u0384\u0006a\f\u0000\u0384\u0385\u0006a\r\u0000\u0385\u00cf"+ + "\u0001\u0000\u0000\u0000\u0386\u0387\u0003l0\u0000\u0387\u0388\u0001\u0000"+ + "\u0000\u0000\u0388\u0389\u0006b\u0012\u0000\u0389\u00d1\u0001\u0000\u0000"+ + "\u0000\u038a\u038b\u0003h.\u0000\u038b\u038c\u0001\u0000\u0000\u0000\u038c"+ + "\u038d\u0006c\u000f\u0000\u038d\u00d3\u0001\u0000\u0000\u0000\u038e\u0393"+ + "\u0003F\u001d\u0000\u038f\u0393\u0003D\u001c\u0000\u0390\u0393\u0003T"+ + "$\u0000\u0391\u0393\u0003\u009eI\u0000\u0392\u038e\u0001\u0000\u0000\u0000"+ + "\u0392\u038f\u0001\u0000\u0000\u0000\u0392\u0390\u0001\u0000\u0000\u0000"+ + "\u0392\u0391\u0001\u0000\u0000\u0000\u0393\u00d5\u0001\u0000\u0000\u0000"+ + "\u0394\u0397\u0003F\u001d\u0000\u0395\u0397\u0003\u009eI\u0000\u0396\u0394"+ + "\u0001\u0000\u0000\u0000\u0396\u0395\u0001\u0000\u0000\u0000\u0397\u039b"+ + "\u0001\u0000\u0000\u0000\u0398\u039a\u0003\u00d4d\u0000\u0399\u0398\u0001"+ + "\u0000\u0000\u0000\u039a\u039d\u0001\u0000\u0000\u0000\u039b\u0399\u0001"+ + 
"\u0000\u0000\u0000\u039b\u039c\u0001\u0000\u0000\u0000\u039c\u03a8\u0001"+ + "\u0000\u0000\u0000\u039d\u039b\u0001\u0000\u0000\u0000\u039e\u03a1\u0003"+ + "T$\u0000\u039f\u03a1\u0003N!\u0000\u03a0\u039e\u0001\u0000\u0000\u0000"+ + "\u03a0\u039f\u0001\u0000\u0000\u0000\u03a1\u03a3\u0001\u0000\u0000\u0000"+ + "\u03a2\u03a4\u0003\u00d4d\u0000\u03a3\u03a2\u0001\u0000\u0000\u0000\u03a4"+ + "\u03a5\u0001\u0000\u0000\u0000\u03a5\u03a3\u0001\u0000\u0000\u0000\u03a5"+ + "\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a8\u0001\u0000\u0000\u0000\u03a7"+ + "\u0396\u0001\u0000\u0000\u0000\u03a7\u03a0\u0001\u0000\u0000\u0000\u03a8"+ + "\u00d7\u0001\u0000\u0000\u0000\u03a9\u03ac\u0003\u00d6e\u0000\u03aa\u03ac"+ + "\u0003\u00aaO\u0000\u03ab\u03a9\u0001\u0000\u0000\u0000\u03ab\u03aa\u0001"+ + "\u0000\u0000\u0000\u03ac\u03ad\u0001\u0000\u0000\u0000\u03ad\u03ab\u0001"+ + "\u0000\u0000\u0000\u03ad\u03ae\u0001\u0000\u0000\u0000\u03ae\u00d9\u0001"+ + "\u0000\u0000\u0000\u03af\u03b0\u00032\u0013\u0000\u03b0\u03b1\u0001\u0000"+ + "\u0000\u0000\u03b1\u03b2\u0006g\t\u0000\u03b2\u00db\u0001\u0000\u0000"+ + "\u0000\u03b3\u03b4\u00034\u0014\u0000\u03b4\u03b5\u0001\u0000\u0000\u0000"+ + "\u03b5\u03b6\u0006h\t\u0000\u03b6\u00dd\u0001\u0000\u0000\u0000\u03b7"+ + "\u03b8\u00036\u0015\u0000\u03b8\u03b9\u0001\u0000\u0000\u0000\u03b9\u03ba"+ + "\u0006i\t\u0000\u03ba\u00df\u0001\u0000\u0000\u0000\u03bb\u03bc\u0003"+ + "B\u001b\u0000\u03bc\u03bd\u0001\u0000\u0000\u0000\u03bd\u03be\u0006j\f"+ + "\u0000\u03be\u03bf\u0006j\r\u0000\u03bf\u00e1\u0001\u0000\u0000\u0000"+ + "\u03c0\u03c1\u0003d,\u0000\u03c1\u03c2\u0001\u0000\u0000\u0000\u03c2\u03c3"+ + "\u0006k\u0010\u0000\u03c3\u00e3\u0001\u0000\u0000\u0000\u03c4\u03c5\u0003"+ + "h.\u0000\u03c5\u03c6\u0001\u0000\u0000\u0000\u03c6\u03c7\u0006l\u000f"+ + "\u0000\u03c7\u00e5\u0001\u0000\u0000\u0000\u03c8\u03c9\u0003l0\u0000\u03c9"+ + "\u03ca\u0001\u0000\u0000\u0000\u03ca\u03cb\u0006m\u0012\u0000\u03cb\u00e7"+ + 
"\u0001\u0000\u0000\u0000\u03cc\u03cd\u0005a\u0000\u0000\u03cd\u03ce\u0005"+ + "s\u0000\u0000\u03ce\u00e9\u0001\u0000\u0000\u0000\u03cf\u03d0\u0003\u00d8"+ + "f\u0000\u03d0\u03d1\u0001\u0000\u0000\u0000\u03d1\u03d2\u0006o\u0013\u0000"+ + "\u03d2\u00eb\u0001\u0000\u0000\u0000\u03d3\u03d4\u00032\u0013\u0000\u03d4"+ + "\u03d5\u0001\u0000\u0000\u0000\u03d5\u03d6\u0006p\t\u0000\u03d6\u00ed"+ + "\u0001\u0000\u0000\u0000\u03d7\u03d8\u00034\u0014\u0000\u03d8\u03d9\u0001"+ + "\u0000\u0000\u0000\u03d9\u03da\u0006q\t\u0000\u03da\u00ef\u0001\u0000"+ + "\u0000\u0000\u03db\u03dc\u00036\u0015\u0000\u03dc\u03dd\u0001\u0000\u0000"+ + "\u0000\u03dd\u03de\u0006r\t\u0000\u03de\u00f1\u0001\u0000\u0000\u0000"+ + "\u03df\u03e0\u0003B\u001b\u0000\u03e0\u03e1\u0001\u0000\u0000\u0000\u03e1"+ + "\u03e2\u0006s\f\u0000\u03e2\u03e3\u0006s\r\u0000\u03e3\u00f3\u0001\u0000"+ + "\u0000\u0000\u03e4\u03e5\u0003\u00a4L\u0000\u03e5\u03e6\u0001\u0000\u0000"+ + "\u0000\u03e6\u03e7\u0006t\n\u0000\u03e7\u03e8\u0006t\u0014\u0000\u03e8"+ + "\u00f5\u0001\u0000\u0000\u0000\u03e9\u03ea\u0005o\u0000\u0000\u03ea\u03eb"+ + "\u0005n\u0000\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec\u03ed\u0006"+ + "u\u0015\u0000\u03ed\u00f7\u0001\u0000\u0000\u0000\u03ee\u03ef\u0005w\u0000"+ + "\u0000\u03ef\u03f0\u0005i\u0000\u0000\u03f0\u03f1\u0005t\u0000\u0000\u03f1"+ + "\u03f2\u0005h\u0000\u0000\u03f2\u03f3\u0001\u0000\u0000\u0000\u03f3\u03f4"+ + "\u0006v\u0015\u0000\u03f4\u00f9\u0001\u0000\u0000\u0000\u03f5\u03f6\b"+ + "\f\u0000\u0000\u03f6\u00fb\u0001\u0000\u0000\u0000\u03f7\u03f9\u0003\u00fa"+ + "w\u0000\u03f8\u03f7\u0001\u0000\u0000\u0000\u03f9\u03fa\u0001\u0000\u0000"+ + "\u0000\u03fa\u03f8\u0001\u0000\u0000\u0000\u03fa\u03fb\u0001\u0000\u0000"+ + "\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc\u03fd\u0003\u0140\u009a"+ + "\u0000\u03fd\u03ff\u0001\u0000\u0000\u0000\u03fe\u03f8\u0001\u0000\u0000"+ + "\u0000\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff\u0401\u0001\u0000\u0000"+ + 
"\u0000\u0400\u0402\u0003\u00faw\u0000\u0401\u0400\u0001\u0000\u0000\u0000"+ + "\u0402\u0403\u0001\u0000\u0000\u0000\u0403\u0401\u0001\u0000\u0000\u0000"+ + "\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u00fd\u0001\u0000\u0000\u0000"+ + "\u0405\u0406\u0003\u00acP\u0000\u0406\u0407\u0001\u0000\u0000\u0000\u0407"+ + "\u0408\u0006y\u0016\u0000\u0408\u00ff\u0001\u0000\u0000\u0000\u0409\u040a"+ + "\u0003\u00fcx\u0000\u040a\u040b\u0001\u0000\u0000\u0000\u040b\u040c\u0006"+ + "z\u0017\u0000\u040c\u0101\u0001\u0000\u0000\u0000\u040d\u040e\u00032\u0013"+ + "\u0000\u040e\u040f\u0001\u0000\u0000\u0000\u040f\u0410\u0006{\t\u0000"+ + "\u0410\u0103\u0001\u0000\u0000\u0000\u0411\u0412\u00034\u0014\u0000\u0412"+ + "\u0413\u0001\u0000\u0000\u0000\u0413\u0414\u0006|\t\u0000\u0414\u0105"+ + "\u0001\u0000\u0000\u0000\u0415\u0416\u00036\u0015\u0000\u0416\u0417\u0001"+ + "\u0000\u0000\u0000\u0417\u0418\u0006}\t\u0000\u0418\u0107\u0001\u0000"+ + "\u0000\u0000\u0419\u041a\u0003B\u001b\u0000\u041a\u041b\u0001\u0000\u0000"+ + "\u0000\u041b\u041c\u0006~\f\u0000\u041c\u041d\u0006~\r\u0000\u041d\u041e"+ + "\u0006~\r\u0000\u041e\u0109\u0001\u0000\u0000\u0000\u041f\u0420\u0003"+ + "d,\u0000\u0420\u0421\u0001\u0000\u0000\u0000\u0421\u0422\u0006\u007f\u0010"+ + "\u0000\u0422\u010b\u0001\u0000\u0000\u0000\u0423\u0424\u0003h.\u0000\u0424"+ + "\u0425\u0001\u0000\u0000\u0000\u0425\u0426\u0006\u0080\u000f\u0000\u0426"+ + "\u010d\u0001\u0000\u0000\u0000\u0427\u0428\u0003l0\u0000\u0428\u0429\u0001"+ + "\u0000\u0000\u0000\u0429\u042a\u0006\u0081\u0012\u0000\u042a\u010f\u0001"+ + "\u0000\u0000\u0000\u042b\u042c\u0003\u00f8v\u0000\u042c\u042d\u0001\u0000"+ + "\u0000\u0000\u042d\u042e\u0006\u0082\u0018\u0000\u042e\u0111\u0001\u0000"+ + "\u0000\u0000\u042f\u0430\u0003\u00d8f\u0000\u0430\u0431\u0001\u0000\u0000"+ + "\u0000\u0431\u0432\u0006\u0083\u0013\u0000\u0432\u0113\u0001\u0000\u0000"+ + "\u0000\u0433\u0434\u0003\u00acP\u0000\u0434\u0435\u0001\u0000\u0000\u0000"+ + 
"\u0435\u0436\u0006\u0084\u0016\u0000\u0436\u0115\u0001\u0000\u0000\u0000"+ + "\u0437\u0438\u00032\u0013\u0000\u0438\u0439\u0001\u0000\u0000\u0000\u0439"+ + "\u043a\u0006\u0085\t\u0000\u043a\u0117\u0001\u0000\u0000\u0000\u043b\u043c"+ + "\u00034\u0014\u0000\u043c\u043d\u0001\u0000\u0000\u0000\u043d\u043e\u0006"+ + "\u0086\t\u0000\u043e\u0119\u0001\u0000\u0000\u0000\u043f\u0440\u00036"+ + "\u0015\u0000\u0440\u0441\u0001\u0000\u0000\u0000\u0441\u0442\u0006\u0087"+ + "\t\u0000\u0442\u011b\u0001\u0000\u0000\u0000\u0443\u0444\u0003B\u001b"+ + "\u0000\u0444\u0445\u0001\u0000\u0000\u0000\u0445\u0446\u0006\u0088\f\u0000"+ + "\u0446\u0447\u0006\u0088\r\u0000\u0447\u011d\u0001\u0000\u0000\u0000\u0448"+ + "\u0449\u0003l0\u0000\u0449\u044a\u0001\u0000\u0000\u0000\u044a\u044b\u0006"+ + "\u0089\u0012\u0000\u044b\u011f\u0001\u0000\u0000\u0000\u044c\u044d\u0003"+ + "\u00acP\u0000\u044d\u044e\u0001\u0000\u0000\u0000\u044e\u044f\u0006\u008a"+ + "\u0016\u0000\u044f\u0121\u0001\u0000\u0000\u0000\u0450\u0451\u0003\u00a8"+ + "N\u0000\u0451\u0452\u0001\u0000\u0000\u0000\u0452\u0453\u0006\u008b\u0019"+ + "\u0000\u0453\u0123\u0001\u0000\u0000\u0000\u0454\u0455\u00032\u0013\u0000"+ + "\u0455\u0456\u0001\u0000\u0000\u0000\u0456\u0457\u0006\u008c\t\u0000\u0457"+ + "\u0125\u0001\u0000\u0000\u0000\u0458\u0459\u00034\u0014\u0000\u0459\u045a"+ + "\u0001\u0000\u0000\u0000\u045a\u045b\u0006\u008d\t\u0000\u045b\u0127\u0001"+ + "\u0000\u0000\u0000\u045c\u045d\u00036\u0015\u0000\u045d\u045e\u0001\u0000"+ + "\u0000\u0000\u045e\u045f\u0006\u008e\t\u0000\u045f\u0129\u0001\u0000\u0000"+ + "\u0000\u0460\u0461\u0003B\u001b\u0000\u0461\u0462\u0001\u0000\u0000\u0000"+ + "\u0462\u0463\u0006\u008f\f\u0000\u0463\u0464\u0006\u008f\r\u0000\u0464"+ + "\u012b\u0001\u0000\u0000\u0000\u0465\u0466\u0005i\u0000\u0000\u0466\u0467"+ + "\u0005n\u0000\u0000\u0467\u0468\u0005f\u0000\u0000\u0468\u0469\u0005o"+ + "\u0000\u0000\u0469\u012d\u0001\u0000\u0000\u0000\u046a\u046b\u00032\u0013"+ + 
"\u0000\u046b\u046c\u0001\u0000\u0000\u0000\u046c\u046d\u0006\u0091\t\u0000"+ + "\u046d\u012f\u0001\u0000\u0000\u0000\u046e\u046f\u00034\u0014\u0000\u046f"+ + "\u0470\u0001\u0000\u0000\u0000\u0470\u0471\u0006\u0092\t\u0000\u0471\u0131"+ + "\u0001\u0000\u0000\u0000\u0472\u0473\u00036\u0015\u0000\u0473\u0474\u0001"+ + "\u0000\u0000\u0000\u0474\u0475\u0006\u0093\t\u0000\u0475\u0133\u0001\u0000"+ + "\u0000\u0000\u0476\u0477\u0003B\u001b\u0000\u0477\u0478\u0001\u0000\u0000"+ + "\u0000\u0478\u0479\u0006\u0094\f\u0000\u0479\u047a\u0006\u0094\r\u0000"+ + "\u047a\u0135\u0001\u0000\u0000\u0000\u047b\u047c\u0005f\u0000\u0000\u047c"+ + "\u047d\u0005u\u0000\u0000\u047d\u047e\u0005n\u0000\u0000\u047e\u047f\u0005"+ + "c\u0000\u0000\u047f\u0480\u0005t\u0000\u0000\u0480\u0481\u0005i\u0000"+ + "\u0000\u0481\u0482\u0005o\u0000\u0000\u0482\u0483\u0005n\u0000\u0000\u0483"+ + "\u0484\u0005s\u0000\u0000\u0484\u0137\u0001\u0000\u0000\u0000\u0485\u0486"+ + "\u00032\u0013\u0000\u0486\u0487\u0001\u0000\u0000\u0000\u0487\u0488\u0006"+ + "\u0096\t\u0000\u0488\u0139\u0001\u0000\u0000\u0000\u0489\u048a\u00034"+ + "\u0014\u0000\u048a\u048b\u0001\u0000\u0000\u0000\u048b\u048c\u0006\u0097"+ + "\t\u0000\u048c\u013b\u0001\u0000\u0000\u0000\u048d\u048e\u00036\u0015"+ + "\u0000\u048e\u048f\u0001\u0000\u0000\u0000\u048f\u0490\u0006\u0098\t\u0000"+ + "\u0490\u013d\u0001\u0000\u0000\u0000\u0491\u0492\u0003\u00a6M\u0000\u0492"+ + "\u0493\u0001\u0000\u0000\u0000\u0493\u0494\u0006\u0099\u000e\u0000\u0494"+ + "\u0495\u0006\u0099\r\u0000\u0495\u013f\u0001\u0000\u0000\u0000\u0496\u0497"+ + "\u0005:\u0000\u0000\u0497\u0141\u0001\u0000\u0000\u0000\u0498\u049e\u0003"+ + "N!\u0000\u0499\u049e\u0003D\u001c\u0000\u049a\u049e\u0003l0\u0000\u049b"+ + "\u049e\u0003F\u001d\u0000\u049c\u049e\u0003T$\u0000\u049d\u0498\u0001"+ + "\u0000\u0000\u0000\u049d\u0499\u0001\u0000\u0000\u0000\u049d\u049a\u0001"+ + "\u0000\u0000\u0000\u049d\u049b\u0001\u0000\u0000\u0000\u049d\u049c\u0001"+ + 
"\u0000\u0000\u0000\u049e\u049f\u0001\u0000\u0000\u0000\u049f\u049d\u0001"+ + "\u0000\u0000\u0000\u049f\u04a0\u0001\u0000\u0000\u0000\u04a0\u0143\u0001"+ + "\u0000\u0000\u0000\u04a1\u04a2\u00032\u0013\u0000\u04a2\u04a3\u0001\u0000"+ + "\u0000\u0000\u04a3\u04a4\u0006\u009c\t\u0000\u04a4\u0145\u0001\u0000\u0000"+ + "\u0000\u04a5\u04a6\u00034\u0014\u0000\u04a6\u04a7\u0001\u0000\u0000\u0000"+ + "\u04a7\u04a8\u0006\u009d\t\u0000\u04a8\u0147\u0001\u0000\u0000\u0000\u04a9"+ + "\u04aa\u00036\u0015\u0000\u04aa\u04ab\u0001\u0000\u0000\u0000\u04ab\u04ac"+ + "\u0006\u009e\t\u0000\u04ac\u0149\u0001\u0000\u0000\u0000:\u0000\u0001"+ + "\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\u01e3\u01ed\u01f1\u01f4"+ + "\u01fd\u01ff\u020a\u0233\u0238\u0241\u0248\u024d\u024f\u025a\u0262\u0265"+ + "\u0267\u026c\u0271\u0277\u027e\u0283\u0289\u028c\u0294\u0298\u031d\u0322"+ + "\u0327\u0329\u032f\u036e\u0373\u0392\u0396\u039b\u03a0\u03a5\u03a7\u03ab"+ + "\u03ad\u03fa\u03fe\u0403\u049d\u049f\u001a\u0005\u0002\u0000\u0005\u0004"+ + "\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000\u0005\n\u0000"+ + "\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001\u0000\u0007"+ + "A\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000\u0007B\u0000"+ + "\u0007#\u0000\u0007!\u0000\u0007\u001b\u0000\u0007%\u0000\u0007N\u0000"+ + "\u0005\u000b\u0000\u0005\u0007\u0000\u0007D\u0000\u0007X\u0000\u0007W"+ + "\u0000\u0007C\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index b4a8e60dd69a..2b887065985d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -282,4 +282,4 @@ 
enrichWithClause atn: -[4, 1, 110, 543, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 118, 8, 1, 10, 1, 12, 1, 121, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 128, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 143, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 155, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 162, 8, 5, 10, 5, 12, 5, 165, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 172, 8, 5, 1, 5, 1, 5, 3, 5, 176, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 184, 8, 5, 10, 5, 12, 5, 187, 9, 5, 1, 6, 1, 6, 3, 6, 191, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 198, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 203, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 210, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 216, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 224, 8, 8, 10, 8, 12, 8, 227, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 237, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 242, 8, 9, 10, 9, 12, 9, 245, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 253, 8, 10, 10, 10, 12, 10, 256, 9, 10, 3, 10, 258, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 270, 8, 13, 10, 13, 12, 13, 273, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 280, 8, 14, 
1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 286, 8, 15, 10, 15, 12, 15, 289, 9, 15, 1, 15, 3, 15, 292, 8, 15, 1, 15, 3, 15, 295, 8, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 303, 8, 17, 10, 17, 12, 17, 306, 9, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 314, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 320, 8, 20, 10, 20, 12, 20, 323, 9, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 3, 23, 334, 8, 23, 1, 23, 1, 23, 3, 23, 338, 8, 23, 1, 24, 1, 24, 1, 24, 1, 24, 3, 24, 344, 8, 24, 1, 25, 1, 25, 1, 25, 5, 25, 349, 8, 25, 10, 25, 12, 25, 352, 9, 25, 1, 26, 1, 26, 1, 26, 5, 26, 357, 8, 26, 10, 26, 12, 26, 360, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 379, 8, 29, 10, 29, 12, 29, 382, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 390, 8, 29, 10, 29, 12, 29, 393, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 401, 8, 29, 10, 29, 12, 29, 404, 9, 29, 1, 29, 1, 29, 3, 29, 408, 8, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 417, 8, 31, 10, 31, 12, 31, 420, 9, 31, 1, 32, 1, 32, 3, 32, 424, 8, 32, 1, 32, 1, 32, 3, 32, 428, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 434, 8, 33, 10, 33, 12, 33, 437, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 443, 8, 34, 10, 34, 12, 34, 446, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 452, 8, 35, 10, 35, 12, 35, 455, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 465, 8, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 5, 40, 477, 8, 40, 10, 40, 12, 40, 480, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 3, 43, 490, 8, 43, 1, 44, 3, 44, 493, 8, 44, 1, 44, 1, 44, 1, 45, 3, 45, 498, 8, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 523, 8, 52, 1, 52, 1, 52, 1, 52, 1, 52, 5, 52, 529, 8, 52, 10, 52, 12, 52, 532, 9, 
52, 3, 52, 534, 8, 52, 1, 53, 1, 53, 1, 53, 3, 53, 539, 8, 53, 1, 53, 1, 53, 1, 53, 0, 4, 2, 10, 16, 18, 54, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 0, 8, 1, 0, 60, 61, 1, 0, 62, 64, 2, 0, 68, 68, 74, 74, 1, 0, 67, 68, 2, 0, 32, 32, 36, 36, 1, 0, 39, 40, 2, 0, 38, 38, 52, 52, 2, 0, 53, 53, 55, 59, 568, 0, 108, 1, 0, 0, 0, 2, 111, 1, 0, 0, 0, 4, 127, 1, 0, 0, 0, 6, 142, 1, 0, 0, 0, 8, 144, 1, 0, 0, 0, 10, 175, 1, 0, 0, 0, 12, 202, 1, 0, 0, 0, 14, 209, 1, 0, 0, 0, 16, 215, 1, 0, 0, 0, 18, 236, 1, 0, 0, 0, 20, 246, 1, 0, 0, 0, 22, 261, 1, 0, 0, 0, 24, 263, 1, 0, 0, 0, 26, 266, 1, 0, 0, 0, 28, 279, 1, 0, 0, 0, 30, 281, 1, 0, 0, 0, 32, 296, 1, 0, 0, 0, 34, 298, 1, 0, 0, 0, 36, 307, 1, 0, 0, 0, 38, 313, 1, 0, 0, 0, 40, 315, 1, 0, 0, 0, 42, 324, 1, 0, 0, 0, 44, 328, 1, 0, 0, 0, 46, 331, 1, 0, 0, 0, 48, 339, 1, 0, 0, 0, 50, 345, 1, 0, 0, 0, 52, 353, 1, 0, 0, 0, 54, 361, 1, 0, 0, 0, 56, 363, 1, 0, 0, 0, 58, 407, 1, 0, 0, 0, 60, 409, 1, 0, 0, 0, 62, 412, 1, 0, 0, 0, 64, 421, 1, 0, 0, 0, 66, 429, 1, 0, 0, 0, 68, 438, 1, 0, 0, 0, 70, 447, 1, 0, 0, 0, 72, 456, 1, 0, 0, 0, 74, 460, 1, 0, 0, 0, 76, 466, 1, 0, 0, 0, 78, 470, 1, 0, 0, 0, 80, 473, 1, 0, 0, 0, 82, 481, 1, 0, 0, 0, 84, 485, 1, 0, 0, 0, 86, 489, 1, 0, 0, 0, 88, 492, 1, 0, 0, 0, 90, 497, 1, 0, 0, 0, 92, 501, 1, 0, 0, 0, 94, 503, 1, 0, 0, 0, 96, 505, 1, 0, 0, 0, 98, 508, 1, 0, 0, 0, 100, 512, 1, 0, 0, 0, 102, 515, 1, 0, 0, 0, 104, 518, 1, 0, 0, 0, 106, 538, 1, 0, 0, 0, 108, 109, 3, 2, 1, 0, 109, 110, 5, 0, 0, 1, 110, 1, 1, 0, 0, 0, 111, 112, 6, 1, -1, 0, 112, 113, 3, 4, 2, 0, 113, 119, 1, 0, 0, 0, 114, 115, 10, 1, 0, 0, 115, 116, 5, 26, 0, 0, 116, 118, 3, 6, 3, 0, 117, 114, 1, 0, 0, 0, 118, 121, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 3, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 122, 128, 3, 96, 48, 0, 123, 128, 3, 30, 15, 0, 124, 128, 
3, 24, 12, 0, 125, 128, 3, 100, 50, 0, 126, 128, 3, 102, 51, 0, 127, 122, 1, 0, 0, 0, 127, 123, 1, 0, 0, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 5, 1, 0, 0, 0, 129, 143, 3, 44, 22, 0, 130, 143, 3, 48, 24, 0, 131, 143, 3, 60, 30, 0, 132, 143, 3, 66, 33, 0, 133, 143, 3, 62, 31, 0, 134, 143, 3, 46, 23, 0, 135, 143, 3, 8, 4, 0, 136, 143, 3, 68, 34, 0, 137, 143, 3, 70, 35, 0, 138, 143, 3, 74, 37, 0, 139, 143, 3, 76, 38, 0, 140, 143, 3, 104, 52, 0, 141, 143, 3, 78, 39, 0, 142, 129, 1, 0, 0, 0, 142, 130, 1, 0, 0, 0, 142, 131, 1, 0, 0, 0, 142, 132, 1, 0, 0, 0, 142, 133, 1, 0, 0, 0, 142, 134, 1, 0, 0, 0, 142, 135, 1, 0, 0, 0, 142, 136, 1, 0, 0, 0, 142, 137, 1, 0, 0, 0, 142, 138, 1, 0, 0, 0, 142, 139, 1, 0, 0, 0, 142, 140, 1, 0, 0, 0, 142, 141, 1, 0, 0, 0, 143, 7, 1, 0, 0, 0, 144, 145, 5, 18, 0, 0, 145, 146, 3, 10, 5, 0, 146, 9, 1, 0, 0, 0, 147, 148, 6, 5, -1, 0, 148, 149, 5, 45, 0, 0, 149, 176, 3, 10, 5, 7, 150, 176, 3, 14, 7, 0, 151, 176, 3, 12, 6, 0, 152, 154, 3, 14, 7, 0, 153, 155, 5, 45, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 157, 5, 42, 0, 0, 157, 158, 5, 41, 0, 0, 158, 163, 3, 14, 7, 0, 159, 160, 5, 35, 0, 0, 160, 162, 3, 14, 7, 0, 161, 159, 1, 0, 0, 0, 162, 165, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 166, 1, 0, 0, 0, 165, 163, 1, 0, 0, 0, 166, 167, 5, 51, 0, 0, 167, 176, 1, 0, 0, 0, 168, 169, 3, 14, 7, 0, 169, 171, 5, 43, 0, 0, 170, 172, 5, 45, 0, 0, 171, 170, 1, 0, 0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 173, 174, 5, 46, 0, 0, 174, 176, 1, 0, 0, 0, 175, 147, 1, 0, 0, 0, 175, 150, 1, 0, 0, 0, 175, 151, 1, 0, 0, 0, 175, 152, 1, 0, 0, 0, 175, 168, 1, 0, 0, 0, 176, 185, 1, 0, 0, 0, 177, 178, 10, 4, 0, 0, 178, 179, 5, 31, 0, 0, 179, 184, 3, 10, 5, 5, 180, 181, 10, 3, 0, 0, 181, 182, 5, 48, 0, 0, 182, 184, 3, 10, 5, 4, 183, 177, 1, 0, 0, 0, 183, 180, 1, 0, 0, 0, 184, 187, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 11, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 
188, 190, 3, 14, 7, 0, 189, 191, 5, 45, 0, 0, 190, 189, 1, 0, 0, 0, 190, 191, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 5, 44, 0, 0, 193, 194, 3, 92, 46, 0, 194, 203, 1, 0, 0, 0, 195, 197, 3, 14, 7, 0, 196, 198, 5, 45, 0, 0, 197, 196, 1, 0, 0, 0, 197, 198, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 200, 5, 50, 0, 0, 200, 201, 3, 92, 46, 0, 201, 203, 1, 0, 0, 0, 202, 188, 1, 0, 0, 0, 202, 195, 1, 0, 0, 0, 203, 13, 1, 0, 0, 0, 204, 210, 3, 16, 8, 0, 205, 206, 3, 16, 8, 0, 206, 207, 3, 94, 47, 0, 207, 208, 3, 16, 8, 0, 208, 210, 1, 0, 0, 0, 209, 204, 1, 0, 0, 0, 209, 205, 1, 0, 0, 0, 210, 15, 1, 0, 0, 0, 211, 212, 6, 8, -1, 0, 212, 216, 3, 18, 9, 0, 213, 214, 7, 0, 0, 0, 214, 216, 3, 16, 8, 3, 215, 211, 1, 0, 0, 0, 215, 213, 1, 0, 0, 0, 216, 225, 1, 0, 0, 0, 217, 218, 10, 2, 0, 0, 218, 219, 7, 1, 0, 0, 219, 224, 3, 16, 8, 3, 220, 221, 10, 1, 0, 0, 221, 222, 7, 0, 0, 0, 222, 224, 3, 16, 8, 2, 223, 217, 1, 0, 0, 0, 223, 220, 1, 0, 0, 0, 224, 227, 1, 0, 0, 0, 225, 223, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 17, 1, 0, 0, 0, 227, 225, 1, 0, 0, 0, 228, 229, 6, 9, -1, 0, 229, 237, 3, 58, 29, 0, 230, 237, 3, 50, 25, 0, 231, 237, 3, 20, 10, 0, 232, 233, 5, 41, 0, 0, 233, 234, 3, 10, 5, 0, 234, 235, 5, 51, 0, 0, 235, 237, 1, 0, 0, 0, 236, 228, 1, 0, 0, 0, 236, 230, 1, 0, 0, 0, 236, 231, 1, 0, 0, 0, 236, 232, 1, 0, 0, 0, 237, 243, 1, 0, 0, 0, 238, 239, 10, 1, 0, 0, 239, 240, 5, 34, 0, 0, 240, 242, 3, 22, 11, 0, 241, 238, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 19, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 247, 3, 54, 27, 0, 247, 257, 5, 41, 0, 0, 248, 258, 5, 62, 0, 0, 249, 254, 3, 10, 5, 0, 250, 251, 5, 35, 0, 0, 251, 253, 3, 10, 5, 0, 252, 250, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 258, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 248, 1, 0, 0, 0, 257, 249, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 260, 5, 51, 0, 0, 260, 21, 1, 0, 0, 0, 261, 262, 3, 54, 27, 0, 262, 23, 1, 0, 0, 0, 
263, 264, 5, 14, 0, 0, 264, 265, 3, 26, 13, 0, 265, 25, 1, 0, 0, 0, 266, 271, 3, 28, 14, 0, 267, 268, 5, 35, 0, 0, 268, 270, 3, 28, 14, 0, 269, 267, 1, 0, 0, 0, 270, 273, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 271, 272, 1, 0, 0, 0, 272, 27, 1, 0, 0, 0, 273, 271, 1, 0, 0, 0, 274, 280, 3, 10, 5, 0, 275, 276, 3, 50, 25, 0, 276, 277, 5, 33, 0, 0, 277, 278, 3, 10, 5, 0, 278, 280, 1, 0, 0, 0, 279, 274, 1, 0, 0, 0, 279, 275, 1, 0, 0, 0, 280, 29, 1, 0, 0, 0, 281, 282, 5, 6, 0, 0, 282, 287, 3, 32, 16, 0, 283, 284, 5, 35, 0, 0, 284, 286, 3, 32, 16, 0, 285, 283, 1, 0, 0, 0, 286, 289, 1, 0, 0, 0, 287, 285, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 290, 292, 3, 38, 19, 0, 291, 290, 1, 0, 0, 0, 291, 292, 1, 0, 0, 0, 292, 294, 1, 0, 0, 0, 293, 295, 3, 34, 17, 0, 294, 293, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 31, 1, 0, 0, 0, 296, 297, 7, 2, 0, 0, 297, 33, 1, 0, 0, 0, 298, 299, 5, 72, 0, 0, 299, 304, 3, 36, 18, 0, 300, 301, 5, 35, 0, 0, 301, 303, 3, 36, 18, 0, 302, 300, 1, 0, 0, 0, 303, 306, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 35, 1, 0, 0, 0, 306, 304, 1, 0, 0, 0, 307, 308, 3, 92, 46, 0, 308, 309, 5, 33, 0, 0, 309, 310, 3, 92, 46, 0, 310, 37, 1, 0, 0, 0, 311, 314, 3, 40, 20, 0, 312, 314, 3, 42, 21, 0, 313, 311, 1, 0, 0, 0, 313, 312, 1, 0, 0, 0, 314, 39, 1, 0, 0, 0, 315, 316, 5, 73, 0, 0, 316, 321, 3, 32, 16, 0, 317, 318, 5, 35, 0, 0, 318, 320, 3, 32, 16, 0, 319, 317, 1, 0, 0, 0, 320, 323, 1, 0, 0, 0, 321, 319, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 41, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 324, 325, 5, 65, 0, 0, 325, 326, 3, 40, 20, 0, 326, 327, 5, 66, 0, 0, 327, 43, 1, 0, 0, 0, 328, 329, 5, 4, 0, 0, 329, 330, 3, 26, 13, 0, 330, 45, 1, 0, 0, 0, 331, 333, 5, 17, 0, 0, 332, 334, 3, 26, 13, 0, 333, 332, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 337, 1, 0, 0, 0, 335, 336, 5, 30, 0, 0, 336, 338, 3, 26, 13, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 47, 1, 0, 0, 0, 339, 340, 5, 8, 0, 0, 340, 343, 3, 26, 13, 0, 341, 342, 5, 30, 
0, 0, 342, 344, 3, 26, 13, 0, 343, 341, 1, 0, 0, 0, 343, 344, 1, 0, 0, 0, 344, 49, 1, 0, 0, 0, 345, 350, 3, 54, 27, 0, 346, 347, 5, 37, 0, 0, 347, 349, 3, 54, 27, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 51, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 358, 3, 56, 28, 0, 354, 355, 5, 37, 0, 0, 355, 357, 3, 56, 28, 0, 356, 354, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 53, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 361, 362, 7, 3, 0, 0, 362, 55, 1, 0, 0, 0, 363, 364, 5, 78, 0, 0, 364, 57, 1, 0, 0, 0, 365, 408, 5, 46, 0, 0, 366, 367, 3, 90, 45, 0, 367, 368, 5, 67, 0, 0, 368, 408, 1, 0, 0, 0, 369, 408, 3, 88, 44, 0, 370, 408, 3, 90, 45, 0, 371, 408, 3, 84, 42, 0, 372, 408, 5, 49, 0, 0, 373, 408, 3, 92, 46, 0, 374, 375, 5, 65, 0, 0, 375, 380, 3, 86, 43, 0, 376, 377, 5, 35, 0, 0, 377, 379, 3, 86, 43, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 383, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 384, 5, 66, 0, 0, 384, 408, 1, 0, 0, 0, 385, 386, 5, 65, 0, 0, 386, 391, 3, 84, 42, 0, 387, 388, 5, 35, 0, 0, 388, 390, 3, 84, 42, 0, 389, 387, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 394, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 394, 395, 5, 66, 0, 0, 395, 408, 1, 0, 0, 0, 396, 397, 5, 65, 0, 0, 397, 402, 3, 92, 46, 0, 398, 399, 5, 35, 0, 0, 399, 401, 3, 92, 46, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 405, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 66, 0, 0, 406, 408, 1, 0, 0, 0, 407, 365, 1, 0, 0, 0, 407, 366, 1, 0, 0, 0, 407, 369, 1, 0, 0, 0, 407, 370, 1, 0, 0, 0, 407, 371, 1, 0, 0, 0, 407, 372, 1, 0, 0, 0, 407, 373, 1, 0, 0, 0, 407, 374, 1, 0, 0, 0, 407, 385, 1, 0, 0, 0, 407, 396, 1, 0, 0, 0, 408, 59, 1, 0, 0, 0, 409, 410, 5, 10, 0, 0, 410, 411, 5, 28, 0, 0, 411, 61, 1, 0, 0, 0, 412, 413, 5, 16, 0, 0, 413, 418, 3, 64, 32, 0, 414, 415, 5, 35, 0, 0, 415, 417, 
3, 64, 32, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 63, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 423, 3, 10, 5, 0, 422, 424, 7, 4, 0, 0, 423, 422, 1, 0, 0, 0, 423, 424, 1, 0, 0, 0, 424, 427, 1, 0, 0, 0, 425, 426, 5, 47, 0, 0, 426, 428, 7, 5, 0, 0, 427, 425, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 65, 1, 0, 0, 0, 429, 430, 5, 9, 0, 0, 430, 435, 3, 52, 26, 0, 431, 432, 5, 35, 0, 0, 432, 434, 3, 52, 26, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 67, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 5, 2, 0, 0, 439, 444, 3, 52, 26, 0, 440, 441, 5, 35, 0, 0, 441, 443, 3, 52, 26, 0, 442, 440, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 69, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 447, 448, 5, 13, 0, 0, 448, 453, 3, 72, 36, 0, 449, 450, 5, 35, 0, 0, 450, 452, 3, 72, 36, 0, 451, 449, 1, 0, 0, 0, 452, 455, 1, 0, 0, 0, 453, 451, 1, 0, 0, 0, 453, 454, 1, 0, 0, 0, 454, 71, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 456, 457, 3, 52, 26, 0, 457, 458, 5, 82, 0, 0, 458, 459, 3, 52, 26, 0, 459, 73, 1, 0, 0, 0, 460, 461, 5, 1, 0, 0, 461, 462, 3, 18, 9, 0, 462, 464, 3, 92, 46, 0, 463, 465, 3, 80, 40, 0, 464, 463, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 75, 1, 0, 0, 0, 466, 467, 5, 7, 0, 0, 467, 468, 3, 18, 9, 0, 468, 469, 3, 92, 46, 0, 469, 77, 1, 0, 0, 0, 470, 471, 5, 12, 0, 0, 471, 472, 3, 50, 25, 0, 472, 79, 1, 0, 0, 0, 473, 478, 3, 82, 41, 0, 474, 475, 5, 35, 0, 0, 475, 477, 3, 82, 41, 0, 476, 474, 1, 0, 0, 0, 477, 480, 1, 0, 0, 0, 478, 476, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 81, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 482, 3, 54, 27, 0, 482, 483, 5, 33, 0, 0, 483, 484, 3, 58, 29, 0, 484, 83, 1, 0, 0, 0, 485, 486, 7, 6, 0, 0, 486, 85, 1, 0, 0, 0, 487, 490, 3, 88, 44, 0, 488, 490, 3, 90, 45, 0, 489, 487, 1, 0, 0, 0, 489, 488, 1, 0, 0, 0, 490, 87, 1, 0, 0, 0, 491, 493, 7, 0, 0, 0, 492, 491, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 495, 5, 
29, 0, 0, 495, 89, 1, 0, 0, 0, 496, 498, 7, 0, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 499, 1, 0, 0, 0, 499, 500, 5, 28, 0, 0, 500, 91, 1, 0, 0, 0, 501, 502, 5, 27, 0, 0, 502, 93, 1, 0, 0, 0, 503, 504, 7, 7, 0, 0, 504, 95, 1, 0, 0, 0, 505, 506, 5, 5, 0, 0, 506, 507, 3, 98, 49, 0, 507, 97, 1, 0, 0, 0, 508, 509, 5, 65, 0, 0, 509, 510, 3, 2, 1, 0, 510, 511, 5, 66, 0, 0, 511, 99, 1, 0, 0, 0, 512, 513, 5, 15, 0, 0, 513, 514, 5, 98, 0, 0, 514, 101, 1, 0, 0, 0, 515, 516, 5, 11, 0, 0, 516, 517, 5, 102, 0, 0, 517, 103, 1, 0, 0, 0, 518, 519, 5, 3, 0, 0, 519, 522, 5, 88, 0, 0, 520, 521, 5, 86, 0, 0, 521, 523, 3, 52, 26, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 533, 1, 0, 0, 0, 524, 525, 5, 87, 0, 0, 525, 530, 3, 106, 53, 0, 526, 527, 5, 35, 0, 0, 527, 529, 3, 106, 53, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 534, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 524, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 105, 1, 0, 0, 0, 535, 536, 3, 52, 26, 0, 536, 537, 5, 33, 0, 0, 537, 539, 1, 0, 0, 0, 538, 535, 1, 0, 0, 0, 538, 539, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 3, 52, 26, 0, 541, 107, 1, 0, 0, 0, 52, 119, 127, 142, 154, 163, 171, 175, 183, 185, 190, 197, 202, 209, 215, 223, 225, 236, 243, 254, 257, 271, 279, 287, 291, 294, 304, 313, 321, 333, 337, 343, 350, 358, 380, 391, 402, 407, 418, 423, 427, 435, 444, 453, 464, 478, 489, 492, 497, 522, 530, 533, 538] \ No newline at end of file +[4, 1, 110, 543, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 
2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 118, 8, 1, 10, 1, 12, 1, 121, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 128, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 143, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 155, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 162, 8, 5, 10, 5, 12, 5, 165, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 172, 8, 5, 1, 5, 1, 5, 3, 5, 176, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 184, 8, 5, 10, 5, 12, 5, 187, 9, 5, 1, 6, 1, 6, 3, 6, 191, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 198, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 203, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 210, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 216, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 224, 8, 8, 10, 8, 12, 8, 227, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 237, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 242, 8, 9, 10, 9, 12, 9, 245, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 253, 8, 10, 10, 10, 12, 10, 256, 9, 10, 3, 10, 258, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 270, 8, 13, 10, 13, 12, 13, 273, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 280, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 286, 8, 15, 10, 15, 12, 15, 289, 9, 15, 1, 15, 3, 15, 292, 8, 15, 1, 15, 3, 15, 295, 8, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 303, 8, 17, 10, 17, 12, 17, 306, 9, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 314, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 320, 8, 20, 10, 20, 12, 20, 323, 9, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 3, 23, 334, 8, 23, 1, 23, 1, 23, 3, 23, 338, 8, 23, 1, 24, 1, 24, 1, 24, 1, 24, 3, 24, 344, 8, 24, 1, 25, 1, 25, 1, 25, 5, 25, 349, 8, 25, 
10, 25, 12, 25, 352, 9, 25, 1, 26, 1, 26, 1, 26, 5, 26, 357, 8, 26, 10, 26, 12, 26, 360, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 379, 8, 29, 10, 29, 12, 29, 382, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 390, 8, 29, 10, 29, 12, 29, 393, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 401, 8, 29, 10, 29, 12, 29, 404, 9, 29, 1, 29, 1, 29, 3, 29, 408, 8, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 417, 8, 31, 10, 31, 12, 31, 420, 9, 31, 1, 32, 1, 32, 3, 32, 424, 8, 32, 1, 32, 1, 32, 3, 32, 428, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 434, 8, 33, 10, 33, 12, 33, 437, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 443, 8, 34, 10, 34, 12, 34, 446, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 452, 8, 35, 10, 35, 12, 35, 455, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 465, 8, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 5, 40, 477, 8, 40, 10, 40, 12, 40, 480, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 3, 43, 490, 8, 43, 1, 44, 3, 44, 493, 8, 44, 1, 44, 1, 44, 1, 45, 3, 45, 498, 8, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 523, 8, 52, 1, 52, 1, 52, 1, 52, 1, 52, 5, 52, 529, 8, 52, 10, 52, 12, 52, 532, 9, 52, 3, 52, 534, 8, 52, 1, 53, 1, 53, 1, 53, 3, 53, 539, 8, 53, 1, 53, 1, 53, 1, 53, 0, 4, 2, 10, 16, 18, 54, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 0, 7, 1, 0, 60, 61, 1, 0, 62, 64, 1, 0, 67, 68, 2, 0, 32, 32, 36, 36, 1, 0, 39, 40, 2, 0, 38, 38, 52, 52, 2, 0, 53, 53, 55, 59, 568, 0, 108, 1, 0, 0, 0, 2, 111, 1, 0, 0, 0, 4, 127, 1, 0, 0, 0, 6, 142, 1, 0, 0, 0, 8, 144, 1, 0, 0, 
0, 10, 175, 1, 0, 0, 0, 12, 202, 1, 0, 0, 0, 14, 209, 1, 0, 0, 0, 16, 215, 1, 0, 0, 0, 18, 236, 1, 0, 0, 0, 20, 246, 1, 0, 0, 0, 22, 261, 1, 0, 0, 0, 24, 263, 1, 0, 0, 0, 26, 266, 1, 0, 0, 0, 28, 279, 1, 0, 0, 0, 30, 281, 1, 0, 0, 0, 32, 296, 1, 0, 0, 0, 34, 298, 1, 0, 0, 0, 36, 307, 1, 0, 0, 0, 38, 313, 1, 0, 0, 0, 40, 315, 1, 0, 0, 0, 42, 324, 1, 0, 0, 0, 44, 328, 1, 0, 0, 0, 46, 331, 1, 0, 0, 0, 48, 339, 1, 0, 0, 0, 50, 345, 1, 0, 0, 0, 52, 353, 1, 0, 0, 0, 54, 361, 1, 0, 0, 0, 56, 363, 1, 0, 0, 0, 58, 407, 1, 0, 0, 0, 60, 409, 1, 0, 0, 0, 62, 412, 1, 0, 0, 0, 64, 421, 1, 0, 0, 0, 66, 429, 1, 0, 0, 0, 68, 438, 1, 0, 0, 0, 70, 447, 1, 0, 0, 0, 72, 456, 1, 0, 0, 0, 74, 460, 1, 0, 0, 0, 76, 466, 1, 0, 0, 0, 78, 470, 1, 0, 0, 0, 80, 473, 1, 0, 0, 0, 82, 481, 1, 0, 0, 0, 84, 485, 1, 0, 0, 0, 86, 489, 1, 0, 0, 0, 88, 492, 1, 0, 0, 0, 90, 497, 1, 0, 0, 0, 92, 501, 1, 0, 0, 0, 94, 503, 1, 0, 0, 0, 96, 505, 1, 0, 0, 0, 98, 508, 1, 0, 0, 0, 100, 512, 1, 0, 0, 0, 102, 515, 1, 0, 0, 0, 104, 518, 1, 0, 0, 0, 106, 538, 1, 0, 0, 0, 108, 109, 3, 2, 1, 0, 109, 110, 5, 0, 0, 1, 110, 1, 1, 0, 0, 0, 111, 112, 6, 1, -1, 0, 112, 113, 3, 4, 2, 0, 113, 119, 1, 0, 0, 0, 114, 115, 10, 1, 0, 0, 115, 116, 5, 26, 0, 0, 116, 118, 3, 6, 3, 0, 117, 114, 1, 0, 0, 0, 118, 121, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 3, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 122, 128, 3, 96, 48, 0, 123, 128, 3, 30, 15, 0, 124, 128, 3, 24, 12, 0, 125, 128, 3, 100, 50, 0, 126, 128, 3, 102, 51, 0, 127, 122, 1, 0, 0, 0, 127, 123, 1, 0, 0, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 5, 1, 0, 0, 0, 129, 143, 3, 44, 22, 0, 130, 143, 3, 48, 24, 0, 131, 143, 3, 60, 30, 0, 132, 143, 3, 66, 33, 0, 133, 143, 3, 62, 31, 0, 134, 143, 3, 46, 23, 0, 135, 143, 3, 8, 4, 0, 136, 143, 3, 68, 34, 0, 137, 143, 3, 70, 35, 0, 138, 143, 3, 74, 37, 0, 139, 143, 3, 76, 38, 0, 140, 143, 3, 104, 52, 0, 141, 143, 3, 78, 39, 0, 142, 129, 1, 0, 0, 0, 142, 130, 1, 0, 0, 0, 142, 131, 1, 0, 0, 0, 142, 
132, 1, 0, 0, 0, 142, 133, 1, 0, 0, 0, 142, 134, 1, 0, 0, 0, 142, 135, 1, 0, 0, 0, 142, 136, 1, 0, 0, 0, 142, 137, 1, 0, 0, 0, 142, 138, 1, 0, 0, 0, 142, 139, 1, 0, 0, 0, 142, 140, 1, 0, 0, 0, 142, 141, 1, 0, 0, 0, 143, 7, 1, 0, 0, 0, 144, 145, 5, 18, 0, 0, 145, 146, 3, 10, 5, 0, 146, 9, 1, 0, 0, 0, 147, 148, 6, 5, -1, 0, 148, 149, 5, 45, 0, 0, 149, 176, 3, 10, 5, 7, 150, 176, 3, 14, 7, 0, 151, 176, 3, 12, 6, 0, 152, 154, 3, 14, 7, 0, 153, 155, 5, 45, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 157, 5, 42, 0, 0, 157, 158, 5, 41, 0, 0, 158, 163, 3, 14, 7, 0, 159, 160, 5, 35, 0, 0, 160, 162, 3, 14, 7, 0, 161, 159, 1, 0, 0, 0, 162, 165, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 166, 1, 0, 0, 0, 165, 163, 1, 0, 0, 0, 166, 167, 5, 51, 0, 0, 167, 176, 1, 0, 0, 0, 168, 169, 3, 14, 7, 0, 169, 171, 5, 43, 0, 0, 170, 172, 5, 45, 0, 0, 171, 170, 1, 0, 0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 173, 174, 5, 46, 0, 0, 174, 176, 1, 0, 0, 0, 175, 147, 1, 0, 0, 0, 175, 150, 1, 0, 0, 0, 175, 151, 1, 0, 0, 0, 175, 152, 1, 0, 0, 0, 175, 168, 1, 0, 0, 0, 176, 185, 1, 0, 0, 0, 177, 178, 10, 4, 0, 0, 178, 179, 5, 31, 0, 0, 179, 184, 3, 10, 5, 5, 180, 181, 10, 3, 0, 0, 181, 182, 5, 48, 0, 0, 182, 184, 3, 10, 5, 4, 183, 177, 1, 0, 0, 0, 183, 180, 1, 0, 0, 0, 184, 187, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 11, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 188, 190, 3, 14, 7, 0, 189, 191, 5, 45, 0, 0, 190, 189, 1, 0, 0, 0, 190, 191, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 5, 44, 0, 0, 193, 194, 3, 92, 46, 0, 194, 203, 1, 0, 0, 0, 195, 197, 3, 14, 7, 0, 196, 198, 5, 45, 0, 0, 197, 196, 1, 0, 0, 0, 197, 198, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 200, 5, 50, 0, 0, 200, 201, 3, 92, 46, 0, 201, 203, 1, 0, 0, 0, 202, 188, 1, 0, 0, 0, 202, 195, 1, 0, 0, 0, 203, 13, 1, 0, 0, 0, 204, 210, 3, 16, 8, 0, 205, 206, 3, 16, 8, 0, 206, 207, 3, 94, 47, 0, 207, 208, 3, 16, 8, 0, 208, 210, 1, 0, 0, 0, 209, 204, 1, 0, 0, 0, 209, 205, 1, 
0, 0, 0, 210, 15, 1, 0, 0, 0, 211, 212, 6, 8, -1, 0, 212, 216, 3, 18, 9, 0, 213, 214, 7, 0, 0, 0, 214, 216, 3, 16, 8, 3, 215, 211, 1, 0, 0, 0, 215, 213, 1, 0, 0, 0, 216, 225, 1, 0, 0, 0, 217, 218, 10, 2, 0, 0, 218, 219, 7, 1, 0, 0, 219, 224, 3, 16, 8, 3, 220, 221, 10, 1, 0, 0, 221, 222, 7, 0, 0, 0, 222, 224, 3, 16, 8, 2, 223, 217, 1, 0, 0, 0, 223, 220, 1, 0, 0, 0, 224, 227, 1, 0, 0, 0, 225, 223, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 17, 1, 0, 0, 0, 227, 225, 1, 0, 0, 0, 228, 229, 6, 9, -1, 0, 229, 237, 3, 58, 29, 0, 230, 237, 3, 50, 25, 0, 231, 237, 3, 20, 10, 0, 232, 233, 5, 41, 0, 0, 233, 234, 3, 10, 5, 0, 234, 235, 5, 51, 0, 0, 235, 237, 1, 0, 0, 0, 236, 228, 1, 0, 0, 0, 236, 230, 1, 0, 0, 0, 236, 231, 1, 0, 0, 0, 236, 232, 1, 0, 0, 0, 237, 243, 1, 0, 0, 0, 238, 239, 10, 1, 0, 0, 239, 240, 5, 34, 0, 0, 240, 242, 3, 22, 11, 0, 241, 238, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 19, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 247, 3, 54, 27, 0, 247, 257, 5, 41, 0, 0, 248, 258, 5, 62, 0, 0, 249, 254, 3, 10, 5, 0, 250, 251, 5, 35, 0, 0, 251, 253, 3, 10, 5, 0, 252, 250, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 258, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 248, 1, 0, 0, 0, 257, 249, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 260, 5, 51, 0, 0, 260, 21, 1, 0, 0, 0, 261, 262, 3, 54, 27, 0, 262, 23, 1, 0, 0, 0, 263, 264, 5, 14, 0, 0, 264, 265, 3, 26, 13, 0, 265, 25, 1, 0, 0, 0, 266, 271, 3, 28, 14, 0, 267, 268, 5, 35, 0, 0, 268, 270, 3, 28, 14, 0, 269, 267, 1, 0, 0, 0, 270, 273, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 271, 272, 1, 0, 0, 0, 272, 27, 1, 0, 0, 0, 273, 271, 1, 0, 0, 0, 274, 280, 3, 10, 5, 0, 275, 276, 3, 50, 25, 0, 276, 277, 5, 33, 0, 0, 277, 278, 3, 10, 5, 0, 278, 280, 1, 0, 0, 0, 279, 274, 1, 0, 0, 0, 279, 275, 1, 0, 0, 0, 280, 29, 1, 0, 0, 0, 281, 282, 5, 6, 0, 0, 282, 287, 3, 32, 16, 0, 283, 284, 5, 35, 0, 0, 284, 286, 3, 32, 16, 0, 285, 283, 1, 0, 0, 0, 286, 289, 1, 
0, 0, 0, 287, 285, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 290, 292, 3, 38, 19, 0, 291, 290, 1, 0, 0, 0, 291, 292, 1, 0, 0, 0, 292, 294, 1, 0, 0, 0, 293, 295, 3, 34, 17, 0, 294, 293, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 31, 1, 0, 0, 0, 296, 297, 5, 74, 0, 0, 297, 33, 1, 0, 0, 0, 298, 299, 5, 72, 0, 0, 299, 304, 3, 36, 18, 0, 300, 301, 5, 35, 0, 0, 301, 303, 3, 36, 18, 0, 302, 300, 1, 0, 0, 0, 303, 306, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 35, 1, 0, 0, 0, 306, 304, 1, 0, 0, 0, 307, 308, 3, 92, 46, 0, 308, 309, 5, 33, 0, 0, 309, 310, 3, 92, 46, 0, 310, 37, 1, 0, 0, 0, 311, 314, 3, 40, 20, 0, 312, 314, 3, 42, 21, 0, 313, 311, 1, 0, 0, 0, 313, 312, 1, 0, 0, 0, 314, 39, 1, 0, 0, 0, 315, 316, 5, 73, 0, 0, 316, 321, 3, 32, 16, 0, 317, 318, 5, 35, 0, 0, 318, 320, 3, 32, 16, 0, 319, 317, 1, 0, 0, 0, 320, 323, 1, 0, 0, 0, 321, 319, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 41, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 324, 325, 5, 65, 0, 0, 325, 326, 3, 40, 20, 0, 326, 327, 5, 66, 0, 0, 327, 43, 1, 0, 0, 0, 328, 329, 5, 4, 0, 0, 329, 330, 3, 26, 13, 0, 330, 45, 1, 0, 0, 0, 331, 333, 5, 17, 0, 0, 332, 334, 3, 26, 13, 0, 333, 332, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 337, 1, 0, 0, 0, 335, 336, 5, 30, 0, 0, 336, 338, 3, 26, 13, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 47, 1, 0, 0, 0, 339, 340, 5, 8, 0, 0, 340, 343, 3, 26, 13, 0, 341, 342, 5, 30, 0, 0, 342, 344, 3, 26, 13, 0, 343, 341, 1, 0, 0, 0, 343, 344, 1, 0, 0, 0, 344, 49, 1, 0, 0, 0, 345, 350, 3, 54, 27, 0, 346, 347, 5, 37, 0, 0, 347, 349, 3, 54, 27, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 51, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 358, 3, 56, 28, 0, 354, 355, 5, 37, 0, 0, 355, 357, 3, 56, 28, 0, 356, 354, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 53, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 361, 362, 7, 2, 0, 0, 362, 55, 1, 0, 0, 0, 363, 364, 5, 78, 0, 0, 364, 57, 1, 
0, 0, 0, 365, 408, 5, 46, 0, 0, 366, 367, 3, 90, 45, 0, 367, 368, 5, 67, 0, 0, 368, 408, 1, 0, 0, 0, 369, 408, 3, 88, 44, 0, 370, 408, 3, 90, 45, 0, 371, 408, 3, 84, 42, 0, 372, 408, 5, 49, 0, 0, 373, 408, 3, 92, 46, 0, 374, 375, 5, 65, 0, 0, 375, 380, 3, 86, 43, 0, 376, 377, 5, 35, 0, 0, 377, 379, 3, 86, 43, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 383, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 384, 5, 66, 0, 0, 384, 408, 1, 0, 0, 0, 385, 386, 5, 65, 0, 0, 386, 391, 3, 84, 42, 0, 387, 388, 5, 35, 0, 0, 388, 390, 3, 84, 42, 0, 389, 387, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 394, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 394, 395, 5, 66, 0, 0, 395, 408, 1, 0, 0, 0, 396, 397, 5, 65, 0, 0, 397, 402, 3, 92, 46, 0, 398, 399, 5, 35, 0, 0, 399, 401, 3, 92, 46, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 405, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 66, 0, 0, 406, 408, 1, 0, 0, 0, 407, 365, 1, 0, 0, 0, 407, 366, 1, 0, 0, 0, 407, 369, 1, 0, 0, 0, 407, 370, 1, 0, 0, 0, 407, 371, 1, 0, 0, 0, 407, 372, 1, 0, 0, 0, 407, 373, 1, 0, 0, 0, 407, 374, 1, 0, 0, 0, 407, 385, 1, 0, 0, 0, 407, 396, 1, 0, 0, 0, 408, 59, 1, 0, 0, 0, 409, 410, 5, 10, 0, 0, 410, 411, 5, 28, 0, 0, 411, 61, 1, 0, 0, 0, 412, 413, 5, 16, 0, 0, 413, 418, 3, 64, 32, 0, 414, 415, 5, 35, 0, 0, 415, 417, 3, 64, 32, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 63, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 423, 3, 10, 5, 0, 422, 424, 7, 3, 0, 0, 423, 422, 1, 0, 0, 0, 423, 424, 1, 0, 0, 0, 424, 427, 1, 0, 0, 0, 425, 426, 5, 47, 0, 0, 426, 428, 7, 4, 0, 0, 427, 425, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 65, 1, 0, 0, 0, 429, 430, 5, 9, 0, 0, 430, 435, 3, 52, 26, 0, 431, 432, 5, 35, 0, 0, 432, 434, 3, 52, 26, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 67, 1, 0, 0, 0, 437, 435, 
1, 0, 0, 0, 438, 439, 5, 2, 0, 0, 439, 444, 3, 52, 26, 0, 440, 441, 5, 35, 0, 0, 441, 443, 3, 52, 26, 0, 442, 440, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 69, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 447, 448, 5, 13, 0, 0, 448, 453, 3, 72, 36, 0, 449, 450, 5, 35, 0, 0, 450, 452, 3, 72, 36, 0, 451, 449, 1, 0, 0, 0, 452, 455, 1, 0, 0, 0, 453, 451, 1, 0, 0, 0, 453, 454, 1, 0, 0, 0, 454, 71, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 456, 457, 3, 52, 26, 0, 457, 458, 5, 82, 0, 0, 458, 459, 3, 52, 26, 0, 459, 73, 1, 0, 0, 0, 460, 461, 5, 1, 0, 0, 461, 462, 3, 18, 9, 0, 462, 464, 3, 92, 46, 0, 463, 465, 3, 80, 40, 0, 464, 463, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 75, 1, 0, 0, 0, 466, 467, 5, 7, 0, 0, 467, 468, 3, 18, 9, 0, 468, 469, 3, 92, 46, 0, 469, 77, 1, 0, 0, 0, 470, 471, 5, 12, 0, 0, 471, 472, 3, 50, 25, 0, 472, 79, 1, 0, 0, 0, 473, 478, 3, 82, 41, 0, 474, 475, 5, 35, 0, 0, 475, 477, 3, 82, 41, 0, 476, 474, 1, 0, 0, 0, 477, 480, 1, 0, 0, 0, 478, 476, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 81, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 482, 3, 54, 27, 0, 482, 483, 5, 33, 0, 0, 483, 484, 3, 58, 29, 0, 484, 83, 1, 0, 0, 0, 485, 486, 7, 5, 0, 0, 486, 85, 1, 0, 0, 0, 487, 490, 3, 88, 44, 0, 488, 490, 3, 90, 45, 0, 489, 487, 1, 0, 0, 0, 489, 488, 1, 0, 0, 0, 490, 87, 1, 0, 0, 0, 491, 493, 7, 0, 0, 0, 492, 491, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 495, 5, 29, 0, 0, 495, 89, 1, 0, 0, 0, 496, 498, 7, 0, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 499, 1, 0, 0, 0, 499, 500, 5, 28, 0, 0, 500, 91, 1, 0, 0, 0, 501, 502, 5, 27, 0, 0, 502, 93, 1, 0, 0, 0, 503, 504, 7, 6, 0, 0, 504, 95, 1, 0, 0, 0, 505, 506, 5, 5, 0, 0, 506, 507, 3, 98, 49, 0, 507, 97, 1, 0, 0, 0, 508, 509, 5, 65, 0, 0, 509, 510, 3, 2, 1, 0, 510, 511, 5, 66, 0, 0, 511, 99, 1, 0, 0, 0, 512, 513, 5, 15, 0, 0, 513, 514, 5, 98, 0, 0, 514, 101, 1, 0, 0, 0, 515, 516, 5, 11, 0, 0, 516, 517, 5, 102, 0, 0, 517, 103, 1, 0, 0, 0, 518, 519, 5, 3, 0, 0, 519, 522, 5, 
88, 0, 0, 520, 521, 5, 86, 0, 0, 521, 523, 3, 52, 26, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 533, 1, 0, 0, 0, 524, 525, 5, 87, 0, 0, 525, 530, 3, 106, 53, 0, 526, 527, 5, 35, 0, 0, 527, 529, 3, 106, 53, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 534, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 524, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 105, 1, 0, 0, 0, 535, 536, 3, 52, 26, 0, 536, 537, 5, 33, 0, 0, 537, 539, 1, 0, 0, 0, 538, 535, 1, 0, 0, 0, 538, 539, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 3, 52, 26, 0, 541, 107, 1, 0, 0, 0, 52, 119, 127, 142, 154, 163, 171, 175, 183, 185, 190, 197, 202, 209, 215, 223, 225, 236, 243, 254, 257, 271, 279, 287, 291, 294, 304, 313, 321, 333, 337, 343, 350, 358, 380, 391, 402, 407, 418, 423, 427, 435, 444, 453, 464, 478, 489, 492, 497, 522, 530, 533, 538] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 1f9c13c16cdd..2f7f0468e455 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -2101,7 +2101,6 @@ public final FromCommandContext fromCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class FromIdentifierContext extends ParserRuleContext { public TerminalNode FROM_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.FROM_UNQUOTED_IDENTIFIER, 0); } - public TerminalNode QUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.QUOTED_IDENTIFIER, 0); } @SuppressWarnings("this-escape") public FromIdentifierContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -2125,20 +2124,11 @@ public T accept(ParseTreeVisitor visitor) { public final FromIdentifierContext fromIdentifier() throws 
RecognitionException { FromIdentifierContext _localctx = new FromIdentifierContext(_ctx, getState()); enterRule(_localctx, 32, RULE_fromIdentifier); - int _la; try { enterOuterAlt(_localctx, 1); { setState(296); - _la = _input.LA(1); - if ( !(_la==QUOTED_IDENTIFIER || _la==FROM_UNQUOTED_IDENTIFIER) ) { - _errHandler.recoverInline(this); - } - else { - if ( _input.LA(1)==Token.EOF ) matchedEOF = true; - _errHandler.reportMatch(this); - consume(); - } + match(FROM_UNQUOTED_IDENTIFIER); } } catch (RecognitionException re) { @@ -4971,32 +4961,32 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\u00015\u00015\u00035\u021b\b5\u00015\u00015\u00015\u0000\u0004\u0002"+ "\n\u0010\u00126\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014"+ "\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfh"+ - "j\u0000\b\u0001\u0000<=\u0001\u0000>@\u0002\u0000DDJJ\u0001\u0000CD\u0002"+ - "\u0000 $$\u0001\u0000\'(\u0002\u0000&&44\u0002\u0000557;\u0238\u0000"+ - "l\u0001\u0000\u0000\u0000\u0002o\u0001\u0000\u0000\u0000\u0004\u007f\u0001"+ - "\u0000\u0000\u0000\u0006\u008e\u0001\u0000\u0000\u0000\b\u0090\u0001\u0000"+ - "\u0000\u0000\n\u00af\u0001\u0000\u0000\u0000\f\u00ca\u0001\u0000\u0000"+ - "\u0000\u000e\u00d1\u0001\u0000\u0000\u0000\u0010\u00d7\u0001\u0000\u0000"+ - "\u0000\u0012\u00ec\u0001\u0000\u0000\u0000\u0014\u00f6\u0001\u0000\u0000"+ - "\u0000\u0016\u0105\u0001\u0000\u0000\u0000\u0018\u0107\u0001\u0000\u0000"+ - "\u0000\u001a\u010a\u0001\u0000\u0000\u0000\u001c\u0117\u0001\u0000\u0000"+ - "\u0000\u001e\u0119\u0001\u0000\u0000\u0000 \u0128\u0001\u0000\u0000\u0000"+ - "\"\u012a\u0001\u0000\u0000\u0000$\u0133\u0001\u0000\u0000\u0000&\u0139"+ - "\u0001\u0000\u0000\u0000(\u013b\u0001\u0000\u0000\u0000*\u0144\u0001\u0000"+ - "\u0000\u0000,\u0148\u0001\u0000\u0000\u0000.\u014b\u0001\u0000\u0000\u0000"+ - "0\u0153\u0001\u0000\u0000\u00002\u0159\u0001\u0000\u0000\u00004\u0161"+ - 
"\u0001\u0000\u0000\u00006\u0169\u0001\u0000\u0000\u00008\u016b\u0001\u0000"+ - "\u0000\u0000:\u0197\u0001\u0000\u0000\u0000<\u0199\u0001\u0000\u0000\u0000"+ - ">\u019c\u0001\u0000\u0000\u0000@\u01a5\u0001\u0000\u0000\u0000B\u01ad"+ - "\u0001\u0000\u0000\u0000D\u01b6\u0001\u0000\u0000\u0000F\u01bf\u0001\u0000"+ - "\u0000\u0000H\u01c8\u0001\u0000\u0000\u0000J\u01cc\u0001\u0000\u0000\u0000"+ - "L\u01d2\u0001\u0000\u0000\u0000N\u01d6\u0001\u0000\u0000\u0000P\u01d9"+ - "\u0001\u0000\u0000\u0000R\u01e1\u0001\u0000\u0000\u0000T\u01e5\u0001\u0000"+ - "\u0000\u0000V\u01e9\u0001\u0000\u0000\u0000X\u01ec\u0001\u0000\u0000\u0000"+ - "Z\u01f1\u0001\u0000\u0000\u0000\\\u01f5\u0001\u0000\u0000\u0000^\u01f7"+ - "\u0001\u0000\u0000\u0000`\u01f9\u0001\u0000\u0000\u0000b\u01fc\u0001\u0000"+ - "\u0000\u0000d\u0200\u0001\u0000\u0000\u0000f\u0203\u0001\u0000\u0000\u0000"+ - "h\u0206\u0001\u0000\u0000\u0000j\u021a\u0001\u0000\u0000\u0000lm\u0003"+ + "j\u0000\u0007\u0001\u0000<=\u0001\u0000>@\u0001\u0000CD\u0002\u0000 "+ + "$$\u0001\u0000\'(\u0002\u0000&&44\u0002\u0000557;\u0238\u0000l\u0001\u0000"+ + "\u0000\u0000\u0002o\u0001\u0000\u0000\u0000\u0004\u007f\u0001\u0000\u0000"+ + "\u0000\u0006\u008e\u0001\u0000\u0000\u0000\b\u0090\u0001\u0000\u0000\u0000"+ + "\n\u00af\u0001\u0000\u0000\u0000\f\u00ca\u0001\u0000\u0000\u0000\u000e"+ + "\u00d1\u0001\u0000\u0000\u0000\u0010\u00d7\u0001\u0000\u0000\u0000\u0012"+ + "\u00ec\u0001\u0000\u0000\u0000\u0014\u00f6\u0001\u0000\u0000\u0000\u0016"+ + "\u0105\u0001\u0000\u0000\u0000\u0018\u0107\u0001\u0000\u0000\u0000\u001a"+ + "\u010a\u0001\u0000\u0000\u0000\u001c\u0117\u0001\u0000\u0000\u0000\u001e"+ + "\u0119\u0001\u0000\u0000\u0000 \u0128\u0001\u0000\u0000\u0000\"\u012a"+ + "\u0001\u0000\u0000\u0000$\u0133\u0001\u0000\u0000\u0000&\u0139\u0001\u0000"+ + "\u0000\u0000(\u013b\u0001\u0000\u0000\u0000*\u0144\u0001\u0000\u0000\u0000"+ + ",\u0148\u0001\u0000\u0000\u0000.\u014b\u0001\u0000\u0000\u00000\u0153"+ + 
"\u0001\u0000\u0000\u00002\u0159\u0001\u0000\u0000\u00004\u0161\u0001\u0000"+ + "\u0000\u00006\u0169\u0001\u0000\u0000\u00008\u016b\u0001\u0000\u0000\u0000"+ + ":\u0197\u0001\u0000\u0000\u0000<\u0199\u0001\u0000\u0000\u0000>\u019c"+ + "\u0001\u0000\u0000\u0000@\u01a5\u0001\u0000\u0000\u0000B\u01ad\u0001\u0000"+ + "\u0000\u0000D\u01b6\u0001\u0000\u0000\u0000F\u01bf\u0001\u0000\u0000\u0000"+ + "H\u01c8\u0001\u0000\u0000\u0000J\u01cc\u0001\u0000\u0000\u0000L\u01d2"+ + "\u0001\u0000\u0000\u0000N\u01d6\u0001\u0000\u0000\u0000P\u01d9\u0001\u0000"+ + "\u0000\u0000R\u01e1\u0001\u0000\u0000\u0000T\u01e5\u0001\u0000\u0000\u0000"+ + "V\u01e9\u0001\u0000\u0000\u0000X\u01ec\u0001\u0000\u0000\u0000Z\u01f1"+ + "\u0001\u0000\u0000\u0000\\\u01f5\u0001\u0000\u0000\u0000^\u01f7\u0001"+ + "\u0000\u0000\u0000`\u01f9\u0001\u0000\u0000\u0000b\u01fc\u0001\u0000\u0000"+ + "\u0000d\u0200\u0001\u0000\u0000\u0000f\u0203\u0001\u0000\u0000\u0000h"+ + "\u0206\u0001\u0000\u0000\u0000j\u021a\u0001\u0000\u0000\u0000lm\u0003"+ "\u0002\u0001\u0000mn\u0005\u0000\u0000\u0001n\u0001\u0001\u0000\u0000"+ "\u0000op\u0006\u0001\uffff\uffff\u0000pq\u0003\u0004\u0002\u0000qw\u0001"+ "\u0000\u0000\u0000rs\n\u0001\u0000\u0000st\u0005\u001a\u0000\u0000tv\u0003"+ @@ -5105,42 +5095,42 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\u0000\u0000\u0123\u0124\u0001\u0000\u0000\u0000\u0124\u0126\u0001\u0000"+ "\u0000\u0000\u0125\u0127\u0003\"\u0011\u0000\u0126\u0125\u0001\u0000\u0000"+ "\u0000\u0126\u0127\u0001\u0000\u0000\u0000\u0127\u001f\u0001\u0000\u0000"+ - "\u0000\u0128\u0129\u0007\u0002\u0000\u0000\u0129!\u0001\u0000\u0000\u0000"+ - "\u012a\u012b\u0005H\u0000\u0000\u012b\u0130\u0003$\u0012\u0000\u012c\u012d"+ - "\u0005#\u0000\u0000\u012d\u012f\u0003$\u0012\u0000\u012e\u012c\u0001\u0000"+ - "\u0000\u0000\u012f\u0132\u0001\u0000\u0000\u0000\u0130\u012e\u0001\u0000"+ - "\u0000\u0000\u0130\u0131\u0001\u0000\u0000\u0000\u0131#\u0001\u0000\u0000"+ - 
"\u0000\u0132\u0130\u0001\u0000\u0000\u0000\u0133\u0134\u0003\\.\u0000"+ - "\u0134\u0135\u0005!\u0000\u0000\u0135\u0136\u0003\\.\u0000\u0136%\u0001"+ - "\u0000\u0000\u0000\u0137\u013a\u0003(\u0014\u0000\u0138\u013a\u0003*\u0015"+ - "\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u0139\u0138\u0001\u0000\u0000"+ - "\u0000\u013a\'\u0001\u0000\u0000\u0000\u013b\u013c\u0005I\u0000\u0000"+ - "\u013c\u0141\u0003 \u0010\u0000\u013d\u013e\u0005#\u0000\u0000\u013e\u0140"+ - "\u0003 \u0010\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0143\u0001"+ - "\u0000\u0000\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0141\u0142\u0001"+ - "\u0000\u0000\u0000\u0142)\u0001\u0000\u0000\u0000\u0143\u0141\u0001\u0000"+ - "\u0000\u0000\u0144\u0145\u0005A\u0000\u0000\u0145\u0146\u0003(\u0014\u0000"+ - "\u0146\u0147\u0005B\u0000\u0000\u0147+\u0001\u0000\u0000\u0000\u0148\u0149"+ - "\u0005\u0004\u0000\u0000\u0149\u014a\u0003\u001a\r\u0000\u014a-\u0001"+ - "\u0000\u0000\u0000\u014b\u014d\u0005\u0011\u0000\u0000\u014c\u014e\u0003"+ - "\u001a\r\u0000\u014d\u014c\u0001\u0000\u0000\u0000\u014d\u014e\u0001\u0000"+ - "\u0000\u0000\u014e\u0151\u0001\u0000\u0000\u0000\u014f\u0150\u0005\u001e"+ - "\u0000\u0000\u0150\u0152\u0003\u001a\r\u0000\u0151\u014f\u0001\u0000\u0000"+ - "\u0000\u0151\u0152\u0001\u0000\u0000\u0000\u0152/\u0001\u0000\u0000\u0000"+ - "\u0153\u0154\u0005\b\u0000\u0000\u0154\u0157\u0003\u001a\r\u0000\u0155"+ - "\u0156\u0005\u001e\u0000\u0000\u0156\u0158\u0003\u001a\r\u0000\u0157\u0155"+ - "\u0001\u0000\u0000\u0000\u0157\u0158\u0001\u0000\u0000\u0000\u01581\u0001"+ - "\u0000\u0000\u0000\u0159\u015e\u00036\u001b\u0000\u015a\u015b\u0005%\u0000"+ - "\u0000\u015b\u015d\u00036\u001b\u0000\u015c\u015a\u0001\u0000\u0000\u0000"+ - "\u015d\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000"+ - "\u015e\u015f\u0001\u0000\u0000\u0000\u015f3\u0001\u0000\u0000\u0000\u0160"+ - "\u015e\u0001\u0000\u0000\u0000\u0161\u0166\u00038\u001c\u0000\u0162\u0163"+ - 
"\u0005%\u0000\u0000\u0163\u0165\u00038\u001c\u0000\u0164\u0162\u0001\u0000"+ - "\u0000\u0000\u0165\u0168\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000"+ - "\u0000\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u01675\u0001\u0000\u0000"+ - "\u0000\u0168\u0166\u0001\u0000\u0000\u0000\u0169\u016a\u0007\u0003\u0000"+ - "\u0000\u016a7\u0001\u0000\u0000\u0000\u016b\u016c\u0005N\u0000\u0000\u016c"+ - "9\u0001\u0000\u0000\u0000\u016d\u0198\u0005.\u0000\u0000\u016e\u016f\u0003"+ + "\u0000\u0128\u0129\u0005J\u0000\u0000\u0129!\u0001\u0000\u0000\u0000\u012a"+ + "\u012b\u0005H\u0000\u0000\u012b\u0130\u0003$\u0012\u0000\u012c\u012d\u0005"+ + "#\u0000\u0000\u012d\u012f\u0003$\u0012\u0000\u012e\u012c\u0001\u0000\u0000"+ + "\u0000\u012f\u0132\u0001\u0000\u0000\u0000\u0130\u012e\u0001\u0000\u0000"+ + "\u0000\u0130\u0131\u0001\u0000\u0000\u0000\u0131#\u0001\u0000\u0000\u0000"+ + "\u0132\u0130\u0001\u0000\u0000\u0000\u0133\u0134\u0003\\.\u0000\u0134"+ + "\u0135\u0005!\u0000\u0000\u0135\u0136\u0003\\.\u0000\u0136%\u0001\u0000"+ + "\u0000\u0000\u0137\u013a\u0003(\u0014\u0000\u0138\u013a\u0003*\u0015\u0000"+ + "\u0139\u0137\u0001\u0000\u0000\u0000\u0139\u0138\u0001\u0000\u0000\u0000"+ + "\u013a\'\u0001\u0000\u0000\u0000\u013b\u013c\u0005I\u0000\u0000\u013c"+ + "\u0141\u0003 \u0010\u0000\u013d\u013e\u0005#\u0000\u0000\u013e\u0140\u0003"+ + " \u0010\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0143\u0001\u0000"+ + "\u0000\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0141\u0142\u0001\u0000"+ + "\u0000\u0000\u0142)\u0001\u0000\u0000\u0000\u0143\u0141\u0001\u0000\u0000"+ + "\u0000\u0144\u0145\u0005A\u0000\u0000\u0145\u0146\u0003(\u0014\u0000\u0146"+ + "\u0147\u0005B\u0000\u0000\u0147+\u0001\u0000\u0000\u0000\u0148\u0149\u0005"+ + "\u0004\u0000\u0000\u0149\u014a\u0003\u001a\r\u0000\u014a-\u0001\u0000"+ + "\u0000\u0000\u014b\u014d\u0005\u0011\u0000\u0000\u014c\u014e\u0003\u001a"+ + "\r\u0000\u014d\u014c\u0001\u0000\u0000\u0000\u014d\u014e\u0001\u0000\u0000"+ + 
"\u0000\u014e\u0151\u0001\u0000\u0000\u0000\u014f\u0150\u0005\u001e\u0000"+ + "\u0000\u0150\u0152\u0003\u001a\r\u0000\u0151\u014f\u0001\u0000\u0000\u0000"+ + "\u0151\u0152\u0001\u0000\u0000\u0000\u0152/\u0001\u0000\u0000\u0000\u0153"+ + "\u0154\u0005\b\u0000\u0000\u0154\u0157\u0003\u001a\r\u0000\u0155\u0156"+ + "\u0005\u001e\u0000\u0000\u0156\u0158\u0003\u001a\r\u0000\u0157\u0155\u0001"+ + "\u0000\u0000\u0000\u0157\u0158\u0001\u0000\u0000\u0000\u01581\u0001\u0000"+ + "\u0000\u0000\u0159\u015e\u00036\u001b\u0000\u015a\u015b\u0005%\u0000\u0000"+ + "\u015b\u015d\u00036\u001b\u0000\u015c\u015a\u0001\u0000\u0000\u0000\u015d"+ + "\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000\u015e"+ + "\u015f\u0001\u0000\u0000\u0000\u015f3\u0001\u0000\u0000\u0000\u0160\u015e"+ + "\u0001\u0000\u0000\u0000\u0161\u0166\u00038\u001c\u0000\u0162\u0163\u0005"+ + "%\u0000\u0000\u0163\u0165\u00038\u001c\u0000\u0164\u0162\u0001\u0000\u0000"+ + "\u0000\u0165\u0168\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000\u0000"+ + "\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u01675\u0001\u0000\u0000\u0000"+ + "\u0168\u0166\u0001\u0000\u0000\u0000\u0169\u016a\u0007\u0002\u0000\u0000"+ + "\u016a7\u0001\u0000\u0000\u0000\u016b\u016c\u0005N\u0000\u0000\u016c9"+ + "\u0001\u0000\u0000\u0000\u016d\u0198\u0005.\u0000\u0000\u016e\u016f\u0003"+ "Z-\u0000\u016f\u0170\u0005C\u0000\u0000\u0170\u0198\u0001\u0000\u0000"+ "\u0000\u0171\u0198\u0003X,\u0000\u0172\u0198\u0003Z-\u0000\u0173\u0198"+ "\u0003T*\u0000\u0174\u0198\u00051\u0000\u0000\u0175\u0198\u0003\\.\u0000"+ @@ -5172,10 +5162,10 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "@ \u0000\u01a0\u019e\u0001\u0000\u0000\u0000\u01a1\u01a4\u0001\u0000\u0000"+ "\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000\u01a2\u01a3\u0001\u0000\u0000"+ "\u0000\u01a3?\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000"+ - "\u01a5\u01a7\u0003\n\u0005\u0000\u01a6\u01a8\u0007\u0004\u0000\u0000\u01a7"+ + 
"\u01a5\u01a7\u0003\n\u0005\u0000\u01a6\u01a8\u0007\u0003\u0000\u0000\u01a7"+ "\u01a6\u0001\u0000\u0000\u0000\u01a7\u01a8\u0001\u0000\u0000\u0000\u01a8"+ "\u01ab\u0001\u0000\u0000\u0000\u01a9\u01aa\u0005/\u0000\u0000\u01aa\u01ac"+ - "\u0007\u0005\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ab\u01ac"+ + "\u0007\u0004\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ab\u01ac"+ "\u0001\u0000\u0000\u0000\u01acA\u0001\u0000\u0000\u0000\u01ad\u01ae\u0005"+ "\t\u0000\u0000\u01ae\u01b3\u00034\u001a\u0000\u01af\u01b0\u0005#\u0000"+ "\u0000\u01b0\u01b2\u00034\u001a\u0000\u01b1\u01af\u0001\u0000\u0000\u0000"+ @@ -5204,7 +5194,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\u0000\u0000\u0000\u01de\u01df\u0001\u0000\u0000\u0000\u01dfQ\u0001\u0000"+ "\u0000\u0000\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e2\u00036\u001b"+ "\u0000\u01e2\u01e3\u0005!\u0000\u0000\u01e3\u01e4\u0003:\u001d\u0000\u01e4"+ - "S\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0006\u0000\u0000\u01e6U\u0001"+ + "S\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0005\u0000\u0000\u01e6U\u0001"+ "\u0000\u0000\u0000\u01e7\u01ea\u0003X,\u0000\u01e8\u01ea\u0003Z-\u0000"+ "\u01e9\u01e7\u0001\u0000\u0000\u0000\u01e9\u01e8\u0001\u0000\u0000\u0000"+ "\u01eaW\u0001\u0000\u0000\u0000\u01eb\u01ed\u0007\u0000\u0000\u0000\u01ec"+ @@ -5214,7 +5204,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2\u01f3"+ "\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u001c\u0000\u0000\u01f4[\u0001"+ "\u0000\u0000\u0000\u01f5\u01f6\u0005\u001b\u0000\u0000\u01f6]\u0001\u0000"+ - "\u0000\u0000\u01f7\u01f8\u0007\u0007\u0000\u0000\u01f8_\u0001\u0000\u0000"+ + "\u0000\u0000\u01f7\u01f8\u0007\u0006\u0000\u0000\u01f8_\u0001\u0000\u0000"+ "\u0000\u01f9\u01fa\u0005\u0005\u0000\u0000\u01fa\u01fb\u0003b1\u0000\u01fb"+ "a\u0001\u0000\u0000\u0000\u01fc\u01fd\u0005A\u0000\u0000\u01fd\u01fe\u0003"+ 
"\u0002\u0001\u0000\u01fe\u01ff\u0005B\u0000\u0000\u01ffc\u0001\u0000\u0000"+ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java index 67f8eb407ee1..7f0b5c73b9fb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java @@ -25,7 +25,7 @@ public String visitIdentifier(IdentifierContext ctx) { @Override public String visitFromIdentifier(FromIdentifierContext ctx) { - return ctx == null ? null : unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.FROM_UNQUOTED_IDENTIFIER()); + return ctx == null ? null : unquoteIdentifier(null, ctx.FROM_UNQUOTED_IDENTIFIER()); } protected static String unquoteIdentifier(TerminalNode quotedNode, TerminalNode unquotedNode) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index cf0dfa372ea3..1a36616cb647 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -338,17 +338,17 @@ public void testInlineStatsWithoutGroups() { } public void testIdentifiersAsIndexPattern() { - assertIdentifierAsIndexPattern("foo", "from `foo`"); - assertIdentifierAsIndexPattern("foo,test-*", "from `foo`,`test-*`"); + // assertIdentifierAsIndexPattern("foo", "from `foo`"); + // assertIdentifierAsIndexPattern("foo,test-*", "from `foo`,`test-*`"); assertIdentifierAsIndexPattern("foo,test-*", "from foo,test-*"); assertIdentifierAsIndexPattern("123-test@foo_bar+baz1", "from 123-test@foo_bar+baz1"); - assertIdentifierAsIndexPattern("foo,test-*,abc", "from 
`foo`,`test-*`,abc"); - assertIdentifierAsIndexPattern("foo, test-*, abc, xyz", "from `foo, test-*, abc, xyz`"); - assertIdentifierAsIndexPattern("foo, test-*, abc, xyz,test123", "from `foo, test-*, abc, xyz`, test123"); + // assertIdentifierAsIndexPattern("foo,test-*,abc", "from `foo`,`test-*`,abc"); + // assertIdentifierAsIndexPattern("foo, test-*, abc, xyz", "from `foo, test-*, abc, xyz`"); + // assertIdentifierAsIndexPattern("foo, test-*, abc, xyz,test123", "from `foo, test-*, abc, xyz`, test123"); assertIdentifierAsIndexPattern("foo,test,xyz", "from foo, test,xyz"); assertIdentifierAsIndexPattern( - ",", - "from , ``" + "", // , + "from " // , `` ); } From 155e7c58cd85ee9178cb818de0f637c2235feb5b Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 8 May 2024 21:34:36 -0700 Subject: [PATCH 015/119] Add factories for time series aggregation (#107803) This change introduces operator factories for time-series aggregations. A time-series aggregation executes in three stages, deviating from the typical two-stage aggregation. For example: `sum(rate(write_requests)), avg(cpu) BY cluster, time-bucket` **1. Initial Stage:** In this stage, a standard hash aggregation is executed, grouped by tsid and time-bucket. The `values` aggregations are added to collect values of the grouping keys excluding the time-bucket, which are then used for final result grouping. ``` rate[INITIAL](write_requests), avg[INITIAL](cpu), values[SINGLE](cluster) BY tsid, time-bucket ``` **2. Intermediate Stage:** Equivalent to the final mode of a standard hash aggregation. This stage merges and reduces the result of the rate aggregations, but merges without reducing the results of non-rate aggregations. Certain aggregations, such as count_distinct, cannot have their final results combined. ``` rate[FINAL](write_requests), avg[INTERMEDIATE](cpu), values[SINGLE](cluster) BY tsid, time-bucket ``` **3. 
Final Stage:** This extra stage performs outer aggregations over the rate results and combines the intermediate results of non-rate aggregations using the specified user-defined grouping keys. ``` sum[SINGLE](rate_result), avg[FINAL](cpu) BY cluster, bucket ``` --- ...imeSeriesAggregationOperatorFactories.java | 157 +++++++++ .../TimeSeriesAggregationOperatorFactory.java | 48 --- .../TimeSeriesAggregationOperatorTests.java | 314 +++++++++++------- 3 files changed, 349 insertions(+), 170 deletions(-) create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java delete mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java new file mode 100644 index 000000000000..bb8d3fd269a8 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactories.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.compute.aggregation.AggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.compute.aggregation.GroupingAggregator; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.aggregation.blockhash.TimeSeriesBlockHash; +import org.elasticsearch.compute.data.ElementType; + +import java.util.ArrayList; +import java.util.List; + +/** + * This class provides operator factories for time-series aggregations. + * A time-series aggregation executes in three stages, deviating from the typical two-stage aggregation. + * For example: {@code sum(rate(write_requests)), avg(cpu) BY cluster, time-bucket} + * + * 1. Initial Stage: + * In this stage, a standard hash aggregation is executed, grouped by tsid and time-bucket. + * The {@code values} aggregations are added to collect values of the grouping keys excluding the time-bucket, + * which are then used for final result grouping. + * {@code rate[INITIAL](write_requests), avg[INITIAL](cpu), values[SINGLE](cluster) BY tsid, time-bucket} + * + * 2. Intermediate Stage: + * Equivalent to the final mode of a standard hash aggregation. + * This stage merges and reduces the result of the rate aggregations, + * but merges (without reducing) the results of non-rate aggregations. + * {@code rate[FINAL](write_requests), avg[INTERMEDIATE](cpu), values[SINGLE](cluster) BY tsid, time-bucket} + * + * 3. Final Stage: + * This extra stage performs outer aggregations over the rate results + * and combines the intermediate results of non-rate aggregations using the specified user-defined grouping keys. 
+ * {@code sum[SINGLE](rate_result), avg[FINAL](cpu) BY cluster, bucket} + */ +public final class TimeSeriesAggregationOperatorFactories { + + public record Initial( + int tsHashChannel, + int timeBucketChannel, + List groupings, + List rates, + List nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List aggregators = new ArrayList<>(groupings.size() + rates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : rates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INITIAL)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INITIAL)); + } + aggregators.addAll(valuesAggregatorForGroupings(groupings, timeBucketChannel)); + return new HashAggregationOperator( + aggregators, + () -> new TimeSeriesBlockHash(tsHashChannel, timeBucketChannel, driverContext), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesInitialAggregationOperatorFactory"; + } + } + + public record Intermediate( + int tsHashChannel, + int timeBucketChannel, + List groupings, + List rates, + List nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List aggregators = new ArrayList<>(groupings.size() + rates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : rates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.FINAL)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.INTERMEDIATE)); + } + aggregators.addAll(valuesAggregatorForGroupings(groupings, timeBucketChannel)); + List hashGroups = List.of( + new BlockHash.GroupSpec(tsHashChannel, ElementType.BYTES_REF), + new BlockHash.GroupSpec(timeBucketChannel, ElementType.LONG) + ); + return new HashAggregationOperator( + aggregators, + () -> BlockHash.build(hashGroups, 
driverContext.blockFactory(), maxPageSize, false), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesIntermediateAggregationOperatorFactory"; + } + } + + public record Final( + List groupings, + List outerRates, + List nonRates, + int maxPageSize + ) implements Operator.OperatorFactory { + @Override + public Operator get(DriverContext driverContext) { + List aggregators = new ArrayList<>(outerRates.size() + nonRates.size()); + for (AggregatorFunctionSupplier f : outerRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.SINGLE)); + } + for (AggregatorFunctionSupplier f : nonRates) { + aggregators.add(f.groupingAggregatorFactory(AggregatorMode.FINAL)); + } + return new HashAggregationOperator( + aggregators, + () -> BlockHash.build(groupings, driverContext.blockFactory(), maxPageSize, false), + driverContext + ); + } + + @Override + public String describe() { + return "TimeSeriesFinalAggregationOperatorFactory"; + } + } + + static List valuesAggregatorForGroupings(List groupings, int timeBucketChannel) { + List aggregators = new ArrayList<>(); + for (BlockHash.GroupSpec g : groupings) { + if (g.channel() != timeBucketChannel) { + final List channels = List.of(g.channel()); + // TODO: perhaps introduce a specialized aggregator for this? 
+ var aggregatorSupplier = (switch (g.elementType()) { + case BYTES_REF -> new org.elasticsearch.compute.aggregation.ValuesBytesRefAggregatorFunctionSupplier(channels); + case DOUBLE -> new org.elasticsearch.compute.aggregation.ValuesDoubleAggregatorFunctionSupplier(channels); + case INT -> new org.elasticsearch.compute.aggregation.ValuesIntAggregatorFunctionSupplier(channels); + case LONG -> new org.elasticsearch.compute.aggregation.ValuesLongAggregatorFunctionSupplier(channels); + case BOOLEAN -> new org.elasticsearch.compute.aggregation.ValuesBooleanAggregatorFunctionSupplier(channels); + case NULL, DOC, UNKNOWN -> throw new IllegalArgumentException("unsupported grouping type"); + }); + aggregators.add(aggregatorSupplier.groupingAggregatorFactory(AggregatorMode.SINGLE)); + } + } + return aggregators; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java deleted file mode 100644 index 0cf0854a9b0c..000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorFactory.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.operator; - -import org.elasticsearch.compute.aggregation.AggregatorMode; -import org.elasticsearch.compute.aggregation.GroupingAggregator; -import org.elasticsearch.compute.aggregation.blockhash.BlockHash; -import org.elasticsearch.compute.aggregation.blockhash.TimeSeriesBlockHash; -import org.elasticsearch.core.TimeValue; - -import java.util.List; - -public record TimeSeriesAggregationOperatorFactory( - AggregatorMode mode, - int tsHashChannel, - int timestampIntervalChannel, - TimeValue timeSeriesPeriod, - List aggregators, - int maxPageSize -) implements Operator.OperatorFactory { - - @Override - public String describe() { - return "TimeSeriesAggregationOperator[mode=" - + mode - + ", tsHashChannel = " - + tsHashChannel - + ", timestampIntervalChannel = " - + timestampIntervalChannel - + ", timeSeriesPeriod = " - + timeSeriesPeriod - + ", maxPageSize = " - + maxPageSize - + "]"; - } - - @Override - public Operator get(DriverContext driverContext) { - BlockHash blockHash = new TimeSeriesBlockHash(tsHashChannel, timestampIntervalChannel, driverContext); - return new HashAggregationOperator(aggregators, () -> blockHash, driverContext); - } - -} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java index 79135b12b2a8..573c960e86b9 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java @@ -11,65 +11,49 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Randomness; -import org.elasticsearch.compute.aggregation.AggregatorMode; +import org.elasticsearch.common.util.CollectionUtils; 
import org.elasticsearch.compute.aggregation.RateLongAggregatorFunctionSupplier; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.aggregation.SumDoubleAggregatorFunctionSupplier; +import org.elasticsearch.compute.aggregation.blockhash.BlockHash; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperatorTests; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.hamcrest.Matcher; import org.junit.After; import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; -import java.util.Map; +import java.util.stream.IntStream; import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.createTimeSeriesSourceOperator; import static org.elasticsearch.compute.lucene.TimeSeriesSortedSourceOperatorTests.writeTS; -import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; -import static org.elasticsearch.test.MapMatcher.assertMap; -import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.hamcrest.Matchers.equalTo; -public class TimeSeriesAggregationOperatorTests extends AnyOperatorTestCase { +public class TimeSeriesAggregationOperatorTests extends ComputeTestCase { - private IndexReader reader; - private final Directory directory = newDirectory(); + private IndexReader reader = null; + private Directory directory = null; @After public void cleanup() throws IOException { IOUtils.close(reader, directory); } - @Override - protected Operator.OperatorFactory simple() { - 
return new TimeSeriesAggregationOperatorFactory(AggregatorMode.FINAL, 0, 1, TimeValue.ZERO, List.of(), 100); + /** + * A {@link DriverContext} with a nonBreakingBigArrays. + */ + protected DriverContext driverContext() { // TODO make this final once all operators support memory tracking + BlockFactory blockFactory = blockFactory(); + return new DriverContext(blockFactory.bigArrays(), blockFactory); } - @Override - protected Matcher expectedDescriptionOfSimple() { - return equalTo( - "TimeSeriesAggregationOperator[mode=FINAL, tsHashChannel = 0, timestampIntervalChannel = 1, " - + "timeSeriesPeriod = 0s, maxPageSize = 100]" - ); - } - - @Override - protected Matcher expectedToStringOfSimple() { - return equalTo( - "HashAggregationOperator[blockHash=TimeSeriesBlockHash{keys=[BytesRefKey[channel=0], " - + "LongKey[channel=1]], entries=-1b}, aggregators=[]]" - ); - } - - public void testBasicRate() { + public void testBasicRate() throws Exception { long[] v1 = { 1, 1, 3, 0, 2, 9, 21, 3, 7, 7, 9, 12 }; long[] t1 = { 1, 5, 11, 20, 21, 59, 88, 91, 92, 97, 99, 112 }; @@ -78,25 +62,51 @@ public void testBasicRate() { long[] v3 = { 0, 1, 0, 1, 1, 4, 2, 2, 2, 2, 3, 5, 5 }; long[] t3 = { 2, 3, 5, 7, 8, 9, 10, 12, 14, 15, 18, 20, 22 }; - List pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); - long unit = between(1, 5); - Map actualRates = runRateTest(pods, TimeValue.timeValueMillis(unit), TimeValue.ZERO); - assertThat( - actualRates, - equalTo( - Map.of( - new Group("\u0001\u0003pods\u0002p1", 0), - 35.0 * unit / 111.0, - new Group("\u0001\u0003pods\u0002p2", 0), - 42.0 * unit / 13.0, - new Group("\u0001\u0003pods\u0002p3", 0), - 10.0 * unit / 20.0 - ) - ) + List pods = List.of( + new Pod("p1", "cluster_1", new Interval(2100, t1, v1)), + new Pod("p2", "cluster_1", new Interval(600, t2, v2)), + new Pod("p3", "cluster_2", new Interval(1100, t3, v3)) ); + long unit = between(1, 5); + { + List> actual = runRateTest( + pods, + List.of("cluster"), + 
TimeValue.timeValueMillis(unit), + TimeValue.timeValueMillis(500) + ); + List> expected = List.of( + List.of(new BytesRef("cluster_1"), 35.0 * unit / 111.0 + 42.0 * unit / 13.0), + List.of(new BytesRef("cluster_2"), 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } + { + List> actual = runRateTest(pods, List.of("pod"), TimeValue.timeValueMillis(unit), TimeValue.timeValueMillis(500)); + List> expected = List.of( + List.of(new BytesRef("p1"), 35.0 * unit / 111.0), + List.of(new BytesRef("p2"), 42.0 * unit / 13.0), + List.of(new BytesRef("p3"), 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } + { + List> actual = runRateTest( + pods, + List.of("cluster", "bucket"), + TimeValue.timeValueMillis(unit), + TimeValue.timeValueMillis(500) + ); + List> expected = List.of( + List.of(new BytesRef("cluster_1"), 2000L, 35.0 * unit / 111.0), + List.of(new BytesRef("cluster_1"), 500L, 42.0 * unit / 13.0), + List.of(new BytesRef("cluster_2"), 1000L, 10.0 * unit / 20.0) + ); + assertThat(actual, equalTo(expected)); + } } - public void testRateWithInterval() { + public void testRateWithInterval() throws Exception { long[] v1 = { 1, 2, 3, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3 }; long[] t1 = { 0, 10_000, 20_000, 30_000, 40_000, 50_000, 60_000, 70_000, 80_000, 90_000, 100_000, 110_000, 120_000 }; @@ -105,59 +115,71 @@ public void testRateWithInterval() { long[] v3 = { 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192 }; long[] t3 = { 0, 10_000, 20_000, 30_000, 40_000, 50_000, 60_000, 70_000, 80_000, 90_000, 100_000, 110_000, 120_000 }; - List pods = List.of(new Pod("p1", t1, v1), new Pod("p2", t2, v2), new Pod("p3", t3, v3)); - Map actualRates = runRateTest(pods, TimeValue.timeValueMillis(1), TimeValue.timeValueMinutes(1)); - assertMap( - actualRates, - matchesMap().entry(new Group("\u0001\u0003pods\u0002p1", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p1", 60_000), 8.0E-5D) - .entry(new Group("\u0001\u0003pods\u0002p1", 0), 8.0E-5D) 
- .entry(new Group("\u0001\u0003pods\u0002p2", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p2", 60_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p2", 0), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p3", 120_000), 0.0D) - .entry(new Group("\u0001\u0003pods\u0002p3", 60_000), 0.07936D) - .entry(new Group("\u0001\u0003pods\u0002p3", 0), 0.00124D) + List pods = List.of( + new Pod("p1", "cluster_1", new Interval(0, t1, v1)), + new Pod("p2", "cluster_2", new Interval(0, t2, v2)), + new Pod("p3", "cluster_2", new Interval(0, t3, v3)) + ); + List> actual = runRateTest( + pods, + List.of("pod", "bucket"), + TimeValue.timeValueMillis(1), + TimeValue.timeValueMinutes(1) + ); + List> expected = List.of( + List.of(new BytesRef("p1]"), 120_000L, 0.0D), + List.of(new BytesRef("p1"), 60_000L, 8.0E-5D), + List.of(new BytesRef("p1"), 0, 8.0E-5D), + List.of(new BytesRef("p2"), 120_000L, 0.0D), + List.of(new BytesRef("p2"), 60_000L, 0.0D), + List.of(new BytesRef("p2"), 0L, 0.0D), + List.of(new BytesRef("p3"), 120_000L, 0.0D), + List.of(new BytesRef("p3"), 60_000L, 0.07936D), + List.of(new BytesRef("p3"), 0L, 0.00124D) ); } - public void testRandomRate() { + public void testRandomRate() throws Exception { int numPods = between(1, 10); List pods = new ArrayList<>(); - Map expectedRates = new HashMap<>(); TimeValue unit = TimeValue.timeValueSeconds(1); + List> expected = new ArrayList<>(); for (int p = 0; p < numPods; p++) { - int numValues = between(2, 100); - long[] values = new long[numValues]; - long[] times = new long[numValues]; - long t = DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-01-01T00:00:00Z"); - for (int i = 0; i < numValues; i++) { - values[i] = randomIntBetween(0, 100); - t += TimeValue.timeValueSeconds(between(1, 10)).millis(); - times[i] = t; + int numIntervals = randomIntBetween(1, 3); + Interval[] intervals = new Interval[numIntervals]; + long startTimeInHours = between(10, 100); + String podName = "p" + p; + for (int interval = 0; interval 
< numIntervals; interval++) { + final long startInterval = TimeValue.timeValueHours(--startTimeInHours).millis(); + int numValues = between(2, 100); + long[] values = new long[numValues]; + long[] times = new long[numValues]; + long delta = 0; + for (int i = 0; i < numValues; i++) { + values[i] = randomIntBetween(0, 100); + delta += TimeValue.timeValueSeconds(between(1, 10)).millis(); + times[i] = delta; + } + intervals[interval] = new Interval(startInterval, times, values); + if (numValues == 1) { + expected.add(List.of(new BytesRef(podName), startInterval, null)); + } else { + expected.add(List.of(new BytesRef(podName), startInterval, intervals[interval].expectedRate(unit))); + } } - Pod pod = new Pod("p" + p, times, values); + Pod pod = new Pod(podName, "cluster", intervals); pods.add(pod); - if (numValues == 1) { - expectedRates.put(new Group("\u0001\u0003pods\u0002" + pod.name, 0), null); - } else { - expectedRates.put(new Group("\u0001\u0003pods\u0002" + pod.name, 0), pod.expectedRate(unit)); - } } - Map actualRates = runRateTest(pods, unit, TimeValue.ZERO); - assertThat(actualRates, equalTo(expectedRates)); + List> actual = runRateTest(pods, List.of("pod", "bucket"), unit, TimeValue.timeValueHours(1)); + assertThat(actual, equalTo(expected)); } - record Pod(String name, long[] times, long[] values) { - Pod { - assert times.length == values.length : times.length + "!=" + values.length; - } - + record Interval(long offset, long[] times, long[] values) { double expectedRate(TimeValue unit) { double dv = 0; - for (int i = 0; i < values.length - 1; i++) { - if (values[i + 1] < values[i]) { - dv += values[i]; + for (int v = 0; v < values.length - 1; v++) { + if (values[v + 1] < values[v]) { + dv += values[v]; } } dv += (values[values.length - 1] - values[0]); @@ -166,9 +188,13 @@ record Pod(String name, long[] times, long[] values) { } } - Map runRateTest(List pods, TimeValue unit, TimeValue interval) { + record Pod(String name, String cluster, Interval... 
intervals) {} + + List> runRateTest(List pods, List groupings, TimeValue unit, TimeValue bucketInterval) throws IOException { + cleanup(); + directory = newDirectory(); long unitInMillis = unit.millis(); - record Doc(String pod, long timestamp, long requests) { + record Doc(String pod, String cluster, long timestamp, long requests) { } var sourceOperatorFactory = createTimeSeriesSourceOperator( @@ -177,70 +203,114 @@ record Doc(String pod, long timestamp, long requests) { Integer.MAX_VALUE, between(1, 100), randomBoolean(), - interval, + bucketInterval, writer -> { List docs = new ArrayList<>(); for (Pod pod : pods) { - for (int i = 0; i < pod.times.length; i++) { - docs.add(new Doc(pod.name, pod.times[i], pod.values[i])); + for (Interval interval : pod.intervals) { + for (int i = 0; i < interval.times.length; i++) { + docs.add(new Doc(pod.name, pod.cluster, interval.offset + interval.times[i], interval.values[i])); + } } } Randomness.shuffle(docs); for (Doc doc : docs) { - writeTS(writer, doc.timestamp, new Object[] { "pod", doc.pod }, new Object[] { "requests", doc.requests }); + writeTS( + writer, + doc.timestamp, + new Object[] { "pod", doc.pod, "cluster", doc.cluster }, + new Object[] { "requests", doc.requests } + ); } return docs.size(); } ); var ctx = driverContext(); - var aggregators = List.of( - new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis).groupingAggregatorFactory(AggregatorMode.INITIAL) - ); - Operator initialHash = new TimeSeriesAggregationOperatorFactory( - AggregatorMode.INITIAL, + List extractOperators = new ArrayList<>(); + var rateField = new NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); + Operator extractRate = (ValuesSourceReaderOperatorTests.factory(reader, rateField, ElementType.LONG).get(ctx)); + extractOperators.add(extractRate); + List nonBucketGroupings = new ArrayList<>(groupings); + nonBucketGroupings.remove("bucket"); + for (String grouping : nonBucketGroupings) { + var 
groupingField = new KeywordFieldMapper.KeywordFieldType(grouping); + extractOperators.add(ValuesSourceReaderOperatorTests.factory(reader, groupingField, ElementType.BYTES_REF).get(ctx)); + } + // _doc, tsid, timestamp, bucket, requests, grouping1, grouping2 + Operator intialAgg = new TimeSeriesAggregationOperatorFactories.Initial( 1, 3, - interval, - aggregators, - randomIntBetween(1, 1000) + IntStream.range(0, nonBucketGroupings.size()).mapToObj(n -> new BlockHash.GroupSpec(5 + n, ElementType.BYTES_REF)).toList(), + List.of(new RateLongAggregatorFunctionSupplier(List.of(4, 2), unitInMillis)), + List.of(), + between(1, 100) ).get(ctx); - aggregators = List.of( - new RateLongAggregatorFunctionSupplier(List.of(2, 3, 4), unitInMillis).groupingAggregatorFactory(AggregatorMode.FINAL) - ); - Operator finalHash = new TimeSeriesAggregationOperatorFactory( - AggregatorMode.FINAL, + // tsid, bucket, rate[0][0],rate[0][1],rate[0][2], grouping1, grouping2 + Operator intermediateAgg = new TimeSeriesAggregationOperatorFactories.Intermediate( 0, 1, - interval, - aggregators, - randomIntBetween(1, 1000) + IntStream.range(0, nonBucketGroupings.size()).mapToObj(n -> new BlockHash.GroupSpec(5 + n, ElementType.BYTES_REF)).toList(), + List.of(new RateLongAggregatorFunctionSupplier(List.of(2, 3, 4), unitInMillis)), + List.of(), + between(1, 100) ).get(ctx); + // tsid, bucket, rate, grouping1, grouping2 + List finalGroups = new ArrayList<>(); + int groupChannel = 3; + for (String grouping : groupings) { + if (grouping.equals("bucket")) { + finalGroups.add(new BlockHash.GroupSpec(1, ElementType.LONG)); + } else { + finalGroups.add(new BlockHash.GroupSpec(groupChannel++, ElementType.BYTES_REF)); + } + } + Operator finalAgg = new TimeSeriesAggregationOperatorFactories.Final( + finalGroups, + List.of(new SumDoubleAggregatorFunctionSupplier(List.of(2))), + List.of(), + between(1, 100) + ).get(ctx); + List results = new ArrayList<>(); - var requestsField = new 
NumberFieldMapper.NumberFieldType("requests", NumberFieldMapper.NumberType.LONG); OperatorTestCase.runDriver( new Driver( ctx, sourceOperatorFactory.get(ctx), - List.of(ValuesSourceReaderOperatorTests.factory(reader, requestsField, ElementType.LONG).get(ctx), initialHash, finalHash), + CollectionUtils.concatLists(extractOperators, List.of(intialAgg, intermediateAgg, finalAgg)), new TestResultPageSinkOperator(results::add), () -> {} ) ); - Map rates = new HashMap<>(); + List> values = new ArrayList<>(); for (Page result : results) { - BytesRefBlock keysBlock = result.getBlock(0); - LongBlock timestampIntervalsBock = result.getBlock(1); - DoubleBlock ratesBlock = result.getBlock(2); - for (int i = 0; i < result.getPositionCount(); i++) { - var key = new Group(keysBlock.getBytesRef(i, new BytesRef()).utf8ToString(), timestampIntervalsBock.getLong(i)); - rates.put(key, ratesBlock.getDouble(i)); + for (int p = 0; p < result.getPositionCount(); p++) { + int blockCount = result.getBlockCount(); + List row = new ArrayList<>(); + for (int b = 0; b < blockCount; b++) { + row.add(BlockUtils.toJavaObject(result.getBlock(b), p)); + } + values.add(row); } result.releaseBlocks(); } - return rates; + values.sort((v1, v2) -> { + for (int i = 0; i < v1.size(); i++) { + if (v1.get(i) instanceof BytesRef b1) { + int cmp = b1.compareTo((BytesRef) v2.get(i)); + if (cmp != 0) { + return cmp; + } + } else if (v1.get(i) instanceof Long b1) { + int cmp = b1.compareTo((Long) v2.get(i)); + if (cmp != 0) { + return -cmp; + } + } + } + return 0; + }); + return values; } - - record Group(String tsidHash, long timestampInterval) {} } From c9b8d7239f038078f0736614d25f25ba19c319e5 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Thu, 9 May 2024 08:56:07 +0200 Subject: [PATCH 016/119] ES|QL: account for page overhead when calculating memory used by blocks (#108347) --- .../xpack/esql/heap_attack/HeapAttackIT.java | 1 - .../compute/data/BooleanArrayVector.java | 4 +- 
.../compute/data/BytesRefArrayVector.java | 4 +- .../compute/data/DoubleArrayVector.java | 4 +- .../compute/data/IntArrayVector.java | 4 +- .../compute/data/LongArrayVector.java | 4 +- .../org/elasticsearch/compute/data/Block.java | 12 ++++ .../compute/data/X-ArrayVector.java.st | 4 +- .../compute/data/BlockAccountingTests.java | 65 ++++++++----------- 9 files changed, 56 insertions(+), 46 deletions(-) diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 38f8ad4766b7..5c034a81fc9c 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -269,7 +269,6 @@ public void testManyEval() throws IOException { assertMap(map, matchesMap().entry("columns", columns).entry("values", hasSize(10_000))); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108104") public void testTooManyEval() throws IOException { initManyLongs(); assertCircuitBreaks(() -> manyEval(490)); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java index 3cebcd75cbe7..e195bda3a6db 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java @@ -23,7 +23,9 @@ final class BooleanArrayVector extends AbstractVector implements BooleanVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BooleanArrayVector.class) // 
TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. - + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final boolean[] values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index 81f507a4fa55..75cf4a2e1fe5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -25,7 +25,9 @@ final class BytesRefArrayVector extends AbstractVector implements BytesRefVector static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BytesRefArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. 
- + RamUsageEstimator.shallowSizeOfInstance(BytesRefVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(BytesRefVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final BytesRefArray values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index 451b6cc7b655..476d5e55c55a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -23,7 +23,9 @@ final class DoubleArrayVector extends AbstractVector implements DoubleVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DoubleArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. 
- + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final double[] values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index a2b6697a3863..97bf1675a9a3 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -23,7 +23,9 @@ final class IntArrayVector extends AbstractVector implements IntVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IntArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. 
- + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final int[] values; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index 6eec82528c8d..4b504943b760 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -23,7 +23,9 @@ final class LongArrayVector extends AbstractVector implements LongVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LongArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. 
- + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; private final long[] values; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index ed7ee93c9932..cfa1d3656ba3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.Accountable; +import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.unit.ByteSizeValue; @@ -44,6 +45,17 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R */ long MAX_LOOKUP = 100_000; + /** + * We do not track memory for pages directly (only for single blocks), + * but the page memory overhead can still be significant, especially for pages containing thousands of blocks. + * For now, we approximate this overhead, per block, using this value. + * + * The exact overhead per block would be (more correctly) {@link RamUsageEstimator#NUM_BYTES_OBJECT_REF}, + * but we approximate it with {@link RamUsageEstimator#NUM_BYTES_OBJECT_ALIGNMENT} to avoid further alignments + * to object size (at the end of the alignment, it would make no practical difference). + */ + int PAGE_MEM_OVERHEAD_PER_BLOCK = RamUsageEstimator.NUM_BYTES_OBJECT_ALIGNMENT; + /** * {@return an efficient dense single-value view of this block}. * Null, if the block is not dense single-valued. 
That is, if diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index 7eeb7765e3b1..4afd8db62f84 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -38,7 +38,9 @@ final class $Type$ArrayVector extends AbstractVector implements $Type$Vector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance($Type$ArrayVector.class) // TODO: remove these extra bytes once `asBlock` returns a block with a separate reference to the vector. - + RamUsageEstimator.shallowSizeOfInstance($Type$VectorBlock.class); + + RamUsageEstimator.shallowSizeOfInstance($Type$VectorBlock.class) + // TODO: remove this if/when we account for memory used by Pages + + Block.PAGE_MEM_OVERHEAD_PER_BLOCK; $if(BytesRef)$ private final BytesRefArray values; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java index ae43e3954935..86bfec512094 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java @@ -42,9 +42,8 @@ public class BlockAccountingTests extends ComputeTestCase { public void testBooleanVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newBooleanArrayVector(new boolean[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, 
RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newBooleanArrayVector(new boolean[] { randomBoolean() }, 1); @@ -62,9 +61,8 @@ public void testBooleanVector() { public void testIntVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newIntArrayVector(new int[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newIntArrayVector(new int[] { randomInt() }, 1); @@ -82,9 +80,8 @@ public void testIntVector() { public void testLongVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newLongArrayVector(new long[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newLongArrayVector(new long[] { randomLong() }, 1); @@ -103,9 +100,8 @@ public void testLongVector() { public void testDoubleVector() { BlockFactory blockFactory = blockFactory(); Vector empty = blockFactory.newDoubleArrayVector(new double[] {}, 0); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + 
RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Vector emptyPlusOne = blockFactory.newDoubleArrayVector(new double[] { randomDouble() }, 1); @@ -127,9 +123,8 @@ public void testBytesRefVector() { var emptyArray = new BytesRefArray(0, blockFactory.bigArrays()); var arrayWithOne = new BytesRefArray(0, blockFactory.bigArrays()); Vector emptyVector = blockFactory.newBytesRefArrayVector(emptyArray, 0); - long expectedEmptyVectorUsed = RamUsageTester.ramUsed(emptyVector, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BytesRefVectorBlock.class - ); + long expectedEmptyVectorUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(emptyVector, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BytesRefVectorBlock.class); assertThat(emptyVector.ramBytesUsed(), is(expectedEmptyVectorUsed)); var bytesRef = new BytesRef(randomAlphaOfLengthBetween(1, 16)); @@ -146,9 +141,8 @@ public void testBytesRefVector() { public void testBooleanBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new BooleanArrayBlock(new boolean[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new BooleanArrayBlock( @@ -194,18 +188,16 @@ public void testBooleanBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - 
BooleanVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(BooleanVectorBlock.class); assertThat(empty.ramBytesUsed(), lessThanOrEqualTo(expectedEmptyUsed)); } public void testIntBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new IntArrayBlock(new int[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new IntArrayBlock( @@ -242,18 +234,16 @@ public void testIntBlock() { public void testIntBlockWithNullFirstValues() { BlockFactory blockFactory = blockFactory(); Block empty = new IntArrayBlock(new int[] {}, 0, null, BitSet.valueOf(new byte[] { 1 }), Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - IntVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(IntVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } public void testLongBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new LongArrayBlock(new long[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + 
RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new LongArrayBlock( @@ -299,18 +289,16 @@ public void testLongBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - LongVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(LongVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } public void testDoubleBlock() { BlockFactory blockFactory = blockFactory(); Block empty = new DoubleArrayBlock(new double[] {}, 0, new int[] { 0 }, null, Block.MvOrdering.UNORDERED, blockFactory); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); Block emptyPlusOne = new DoubleArrayBlock( @@ -356,9 +344,8 @@ public void testDoubleBlockWithNullFirstValues() { Block.MvOrdering.UNORDERED, blockFactory() ); - long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + RamUsageEstimator.shallowSizeOfInstance( - DoubleVectorBlock.class - ); + long expectedEmptyUsed = Block.PAGE_MEM_OVERHEAD_PER_BLOCK + RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR) + + RamUsageEstimator.shallowSizeOfInstance(DoubleVectorBlock.class); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } From 68a8664c2105bff5765c3fc0f6701ad21f23c2b1 Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Thu, 9 May 2024 
17:17:10 +1000 Subject: [PATCH 017/119] [DOCS] Fix stored_fields parameter description (#98385) (#108445) (referenced from get and multi_get API docs) Closes #98385 --- docs/reference/rest-api/common-parms.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index dd264c0e5bcd..a2a397c4efe6 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -1062,8 +1062,8 @@ end::stats[] tag::stored_fields[] `stored_fields`:: -(Optional, Boolean) If `true`, retrieves the document fields stored in the -index rather than the document `_source`. Defaults to `false`. +(Optional, string) +A comma-separated list of <> to include in the response. end::stored_fields[] tag::sync[] From 05d5abe94e76d26044936c282dcdc28033cad827 Mon Sep 17 00:00:00 2001 From: Andrew Wilkins Date: Thu, 9 May 2024 15:33:07 +0800 Subject: [PATCH 018/119] apm-data: ignore malformed fields, and too many dynamic fields (#108444) * apm-data: ignore_{malformed,dynamic_beyond_limit} Enable ignore_malformed on all non-metrics APM data streams, and enable ignore_dynamic_beyond_limit for all APM data streams. We can enable ignore_malformed on metrics data streams when https://github.com/elastic/elasticsearch/issues/90007 is fixed. 
* Update docs/changelog/108444.yaml --- docs/changelog/108444.yaml | 5 + .../component-templates/apm@settings.yaml | 4 + .../metrics-apm@settings.yaml | 6 ++ .../src/main/resources/resources.yaml | 2 +- .../test/30_lenient_mappings.yml | 100 ++++++++++++++++++ 5 files changed, 116 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/108444.yaml create mode 100644 x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml diff --git a/docs/changelog/108444.yaml b/docs/changelog/108444.yaml new file mode 100644 index 000000000000..c946ab24f939 --- /dev/null +++ b/docs/changelog/108444.yaml @@ -0,0 +1,5 @@ +pr: 108444 +summary: "Apm-data: ignore malformed fields, and too many dynamic fields" +area: Data streams +type: enhancement +issues: [] diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml index 3ca15224dafc..75671948de11 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/apm@settings.yaml @@ -8,3 +8,7 @@ template: sort: field: "@timestamp" order: desc + mapping: + ignore_malformed: true + total_fields: + ignore_dynamic_beyond_limit: true diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml index e6c84b6ed06f..819d5d7eafb8 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/metrics-apm@settings.yaml @@ -6,3 +6,9 @@ _meta: template: settings: codec: best_compression + mapping: + # apm@settings sets `ignore_malformed: true`, but we need + # to disable this for metrics since they use synthetic source, + # and this combination is incompatible with 
the + # aggregate_metric_double field type. + ignore_malformed: false diff --git a/x-pack/plugin/apm-data/src/main/resources/resources.yaml b/x-pack/plugin/apm-data/src/main/resources/resources.yaml index 0e27e454f867..772057d4931a 100644 --- a/x-pack/plugin/apm-data/src/main/resources/resources.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/resources.yaml @@ -1,7 +1,7 @@ # "version" holds the version of the templates and ingest pipelines installed # by xpack-plugin apm-data. This must be increased whenever an existing template or # pipeline is changed, in order for it to be updated on Elasticsearch upgrade. -version: 3 +version: 4 component-templates: # Data lifecycle. diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml new file mode 100644 index 000000000000..97265a9b81a7 --- /dev/null +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/30_lenient_mappings.yml @@ -0,0 +1,100 @@ +--- +setup: + - do: + cluster.health: + wait_for_events: languid + + - do: + cluster.put_component_template: + name: "logs-apm.app@custom" + body: + template: + settings: + mapping: + total_fields: + limit: 20 + +--- +"Test ignore_malformed": + - do: + bulk: + index: traces-apm-testing + refresh: true + body: + # Passing a (non-coercable) string into a numeric field should not + # cause an indexing failure; it should just not be indexed. 
+ - create: {} + - '{"@timestamp": "2017-06-22", "numeric_labels": {"key": "string"}}' + - create: {} + - '{"@timestamp": "2017-06-22", "numeric_labels": {"key": 123}}' + + - is_false: errors + + - do: + search: + index: traces-apm-testing + body: + fields: ["numeric_labels.*", "_ignored"] + - length: { hits.hits: 2 } + - match: { hits.hits.0.fields: {"_ignored": ["numeric_labels.key"]} } + - match: { hits.hits.1.fields: {"numeric_labels.key": [123.0]} } + +--- +"Test ignore_dynamic_beyond_limit": + - do: + bulk: + index: logs-apm.app.svc1-testing + refresh: true + body: + - create: {} + - {"@timestamp": "2017-06-22", "k1": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k2": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k3": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k4": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k5": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k6": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k7": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k8": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k9": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k10": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k11": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k12": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k13": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k14": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k15": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k16": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k17": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k18": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k19": ""} + - create: {} + - {"@timestamp": "2017-06-22", "k20": ""} + + - is_false: errors + + - do: + search: + index: logs-apm.app.svc1-testing + body: + query: + term: + _ignored: + value: k20 + - length: { hits.hits: 1 } From fa2f81353e3ec44b3945bfbf3792db2d9c5a6030 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 9 May 2024 09:59:56 +0200 Subject: [PATCH 019/119] [DOCS] Adds complete Cohere tutorial (#108415) --- .../search-your-data/cohere-es.asciidoc | 371 ++++++++++++++++++ .../search-your-data/semantic-search.asciidoc | 1 + 2 files changed, 372 insertions(+) create mode 100644 docs/reference/search/search-your-data/cohere-es.asciidoc diff --git a/docs/reference/search/search-your-data/cohere-es.asciidoc b/docs/reference/search/search-your-data/cohere-es.asciidoc new file mode 100644 index 000000000000..751cfebca8c7 --- /dev/null +++ b/docs/reference/search/search-your-data/cohere-es.asciidoc @@ -0,0 +1,371 @@ +[[cohere-es]] +=== Tutorial: Using Cohere with {es} +++++ +Using Cohere with {es} +++++ + +The instructions in this tutorial shows you how to compute embeddings with +Cohere using the {infer} API and store them for efficient vector or hybrid +search in {es}. This tutorial will use the Python {es} client to perform the +operations. + +You'll learn how to: + +* create an {infer} endpoint for text embedding using the Cohere service, +* create the necessary index mapping for the {es} index, +* build an {infer} pipeline to ingest documents into the index together with the +embeddings, +* perform hybrid search on the data, +* rerank search results by using Cohere's rerank model, +* design a RAG system with Cohere's Chat API. + +The tutorial uses the https://huggingface.co/datasets/mteb/scifact[SciFact] data +set. + +Refer to https://docs.cohere.com/docs/elasticsearch-and-cohere[Cohere's tutorial] +for an example using a different data set. + + +[discrete] +[[cohere-es-req]] +==== Requirements + +* A https://cohere.com/[Cohere account], +* an https://www.elastic.co/guide/en/cloud/current/ec-getting-started.html[Elastic Cloud] +account, +* Python 3.7 or higher. 
+ + +[discrete] +[[cohere-es-packages]] +==== Istall required packages + +Install {es} and Cohere: + +[source,py] +------------------------------------------------------------ +!pip install elasticsearch +!pip install cohere +------------------------------------------------------------ + +Import the required packages: + +[source,py] +------------------------------------------------------------ +from elasticsearch import Elasticsearch, helpers +import cohere +import json +import requests +------------------------------------------------------------ + +[discrete] +[[cohere-es-client]] +==== Create the {es} client + +To create your {es} client, you need: +* https://www.elastic.co/search-labs/tutorials/install-elasticsearch/elastic-cloud#finding-your-cloud-id[your Cloud ID], +* https://www.elastic.co/search-labs/tutorials/install-elasticsearch/elastic-cloud#creating-an-api-key[an encoded API key]. + +[source,py] +------------------------------------------------------------ +ELASTICSEARCH_ENDPOINT = "elastic_endpoint" +ELASTIC_API_KEY = "elastic_api_key" + +client = Elasticsearch( + cloud_id=ELASTICSEARCH_ENDPOINT, + api_key=ELASTIC_API_KEY +) + +# Confirm the client has connected +print(client.info()) +------------------------------------------------------------ + + +[discrete] +[[cohere-es-infer-endpoint]] +==== Create the {infer} endpoint + +<> first. In this example, the +{infer} endpoint uses Cohere's `embed-english-v3.0` model and the +`embedding_type` is set to `byte`. 
+ +[source,py] +------------------------------------------------------------ +COHERE_API_KEY = "cohere_api_key" + +client.inference.put_model( + task_type="text_embedding", + inference_id="cohere_embeddings", + body={ + "service": "cohere", + "service_settings": { + "api_key": COHERE_API_KEY, + "model_id": "embed-english-v3.0", + "embedding_type": "byte" + } + }, +) +------------------------------------------------------------ + +You can find your API keys in your Cohere dashboard under the +https://dashboard.cohere.com/api-keys[API keys section]. + + +[discrete] +[[cohere-es-index-mapping]] +==== Create the index mapping + +Create the index mapping for the index that will contain the embeddings. + +[source,py] +------------------------------------------------------------ +client.indices.create( + index="cohere-embeddings", + settings={"index": {"default_pipeline": "cohere_embeddings"}}, + mappings={ + "properties": { + "text_embedding": { + "type": "dense_vector", + "dims": 1024, + "element_type": "byte", + }, + "text": {"type": "text"}, + "id": {"type": "integer"}, + "title": {"type": "text"} + } + }, +) +------------------------------------------------------------ + + +[discrete] +[[cohere-es-infer-pipeline]] +==== Create the {infer} pipeline + +Now you have an {infer} endpoint and an index ready to store embeddings. The +next step is to create an <> with an +<> that will create the embeddings using +the {infer} endpoint and stores them in the index. 
+ +[source,py] +-------------------------------------------------- +client.ingest.put_pipeline( + id="cohere_embeddings", + description="Ingest pipeline for Cohere inference.", + processors=[ + { + "inference": { + "model_id": "cohere_embeddings", + "input_output": { + "input_field": "text", + "output_field": "text_embedding", + }, + } + } + ], +) +-------------------------------------------------- + + +[discrete] +[[cohere-es-insert-documents]] +==== Prepare data and insert documents + +This example uses the https://huggingface.co/datasets/mteb/scifact[SciFact] data +set that you can find on HuggingFace. + +[source,py] +-------------------------------------------------- +url = 'https://huggingface.co/datasets/mteb/scifact/raw/main/corpus.jsonl' + +# Fetch the JSONL data from the URL +response = requests.get(url) +response.raise_for_status() # Ensure noticing bad responses + +# Split the content by new lines and parse each line as JSON +data = [json.loads(line) for line in response.text.strip().split('\n') if line] +# Now data is a list of dictionaries + +# Change `_id` key to `id` as `_id` is a reserved key in Elasticsearch. +for item in data: + if '_id' in item: + item['id'] = item.pop('_id') + +# Prepare the documents to be indexed +documents = [] +for line in data: + data_dict = line + documents.append({ + "_index": "cohere-embeddings", + "_source": data_dict, + } + ) + +# Use the bulk endpoint to index +helpers.bulk(client, documents) + +print("Data ingestion completed, text embeddings generated!") +-------------------------------------------------- + +Your index is populated with the SciFact data and text embeddings for the text +field. + + +[discrete] +[[cohere-es-hybrid-search]] +==== Hybrid search + +Let's start querying the index! + +The code below performs a hybrid search. 
The `kNN` query computes the relevance +of search results based on vector similarity using the `text_embedding` field, +the lexical search query uses BM25 retrieval to compute keyword similarity on +the `title` and `text` fields. + +[source,py] +-------------------------------------------------- +query = "What is biosimilarity?" + +response = client.search( + index="cohere-embeddings", + size=100, + knn={ + "field": "text_embedding", + "query_vector_builder": { + "text_embedding": { + "model_id": "cohere_embeddings", + "model_text": query, + } + }, + "k": 10, + "num_candidates": 50, + }, + query={ + "multi_match": { + "query": query, + "fields": ["text", "title"] + } + } +) + +raw_documents = response["hits"]["hits"] + +# Display the first 10 results +for document in raw_documents[0:10]: + print(f'Title: {document["_source"]["title"]}\nText: {document["_source"]["text"]}\n') + +# Format the documents for ranking +documents = [] +for hit in response["hits"]["hits"]: + documents.append(hit["_source"]["text"]) +-------------------------------------------------- + + +[discrete] +[[cohere-es-rerank-results]] +===== Rerank search results + +To combine the results more effectively, use +https://docs.cohere.com/docs/rerank-2[Cohere's Rerank v3] model through the +{infer} API to provide a more precise semantic reranking of the results. + +Create an {infer} endpoint with your Cohere API key and the used model name as +the `model_id` (`rerank-english-v3.0` in this example). + +[source,py] +-------------------------------------------------- +client.inference.put_model( + task_type="rerank", + inference_id="cohere_rerank", + body={ + "service": "cohere", + "service_settings":{ + "api_key": COHERE_API_KEY, + "model_id": "rerank-english-v3.0" + }, + "task_settings": { + "top_n": 10, + }, + } +) +-------------------------------------------------- + +Rerank the results using the new {infer} endpoint. 
+ +[source,py] +-------------------------------------------------- +# Pass the query and the search results to the service +response = client.inference.inference( + inference_id="cohere_rerank", + body={ + "query": query, + "input": documents, + "task_settings": { + "return_documents": False + } + } +) + +# Reconstruct the input documents based on the index provided in the rereank response +ranked_documents = [] +for document in response.body["rerank"]: + ranked_documents.append({ + "title": raw_documents[int(document["index"])]["_source"]["title"], + "text": raw_documents[int(document["index"])]["_source"]["text"] + }) + +# Print the top 10 results +for document in ranked_documents[0:10]: + print(f"Title: {document['title']}\nText: {document['text']}\n") +-------------------------------------------------- + +The response is a list of documents in descending order of relevance. Each +document has a corresponding index that reflects the order of the documents when +they were sent to the {infer} endpoint. + + +[discrete] +[[cohere-es-rag]] +==== Retrieval Augmented Generation (RAG) with Cohere and {es} + +RAG is a method for generating text using additional information fetched from an +external data source. With the ranked results, you can build a RAG system on the +top of what you previously created by using +https://docs.cohere.com/docs/chat-api[Cohere's Chat API]. + +Pass in the retrieved documents and the query to receive a grounded response +using Cohere's newest generative model +https://docs.cohere.com/docs/command-r-plus[Command R+]. + +Then pass in the query and the documents to the Chat API, and print out the +response. 
+ +[source,py] +-------------------------------------------------- +response = co.chat(message=query, documents=ranked_documents, model='command-r-plus') + +source_documents = [] +for citation in response.citations: + for document_id in citation.document_ids: + if document_id not in source_documents: + source_documents.append(document_id) + +print(f"Query: {query}") +print(f"Response: {response.text}") +print("Sources:") +for document in response.documents: + if document['id'] in source_documents: + print(f"{document['title']}: {document['text']}") + +-------------------------------------------------- + +The response will look similar to this: + +[source,consol-result] +-------------------------------------------------- +Query: What is biosimilarity? +Response: Biosimilarity is based on the comparability concept, which has been used successfully for several decades to ensure close similarity of a biological product before and after a manufacturing change. Over the last 10 years, experience with biosimilars has shown that even complex biotechnology-derived proteins can be copied successfully. +Sources: +Interchangeability of Biosimilars: A European Perspective: (...) 
+-------------------------------------------------- +// NOTCONSOLE diff --git a/docs/reference/search/search-your-data/semantic-search.asciidoc b/docs/reference/search/search-your-data/semantic-search.asciidoc index a4d892c98645..a1197e7bbbd3 100644 --- a/docs/reference/search/search-your-data/semantic-search.asciidoc +++ b/docs/reference/search/search-your-data/semantic-search.asciidoc @@ -136,3 +136,4 @@ include::{es-ref-dir}/tab-widgets/semantic-search/hybrid-search-widget.asciidoc[ include::semantic-search-elser.asciidoc[] include::semantic-search-inference.asciidoc[] +include::cohere-es.asciidoc[] From 864543b305da2eb16a5ade5e1453cdba245e7283 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 9 May 2024 09:37:56 +0100 Subject: [PATCH 020/119] Move conceptual docs about `ActionListener` (#107875) This information is more discoverable as the class-level javadocs for `ActionListener` itself rather than hidden away in a separate Markdown file. Also this way the links all stay up to date. --- docs/internal/DistributedArchitectureGuide.md | 65 +------------- .../elasticsearch/action/ActionListener.java | 85 ++++++++++++++++++- 2 files changed, 82 insertions(+), 68 deletions(-) diff --git a/docs/internal/DistributedArchitectureGuide.md b/docs/internal/DistributedArchitectureGuide.md index 7f10a1b3a8ca..59305c630573 100644 --- a/docs/internal/DistributedArchitectureGuide.md +++ b/docs/internal/DistributedArchitectureGuide.md @@ -10,70 +10,7 @@ ### ActionListener -Callbacks are used extensively throughout Elasticsearch because they enable us to write asynchronous and nonblocking code, i.e. code which -doesn't necessarily compute a result straight away but also doesn't block the calling thread waiting for the result to become available. -They support several useful control flows: - -- They can be completed immediately on the calling thread. -- They can be completed concurrently on a different thread. 
-- They can be stored in a data structure and completed later on when the system reaches a particular state. -- Most commonly, they can be passed on to other methods that themselves require a callback. -- They can be wrapped in another callback which modifies the behaviour of the original callback, perhaps adding some extra code to run - before or after completion, before passing them on. - -`ActionListener` is a general-purpose callback interface that is used extensively across the Elasticsearch codebase. `ActionListener` is -used pretty much everywhere that needs to perform some asynchronous and nonblocking computation. The uniformity makes it easier to compose -parts of the system together without needing to build adapters to convert back and forth between different kinds of callback. It also makes -it easier to develop the skills needed to read and understand all the asynchronous code, although this definitely takes practice and is -certainly not easy in an absolute sense. Finally, it has allowed us to build a rich library for working with `ActionListener` instances -themselves, creating new instances out of existing ones and completing them in interesting ways. 
See for instance: - -- all the static methods on [ActionListener](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/ActionListener.java) itself -- [`ThreadedActionListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/ThreadedActionListener.java) for forking work elsewhere -- [`RefCountingListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/RefCountingListener.java) for running work in parallel -- [`SubscribableListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java) for constructing flexible workflows - -Callback-based asynchronous code can easily call regular synchronous code, but synchronous code cannot run callback-based asynchronous code -without blocking the calling thread until the callback is called back. This blocking is at best undesirable (threads are too expensive to -waste with unnecessary blocking) and at worst outright broken (the blocking can lead to deadlock). Unfortunately this means that most of our -code ends up having to be written with callbacks, simply because it's ultimately calling into some other code that takes a callback. The -entry points for all Elasticsearch APIs are callback-based (e.g. REST APIs all start at -[`org.elasticsearch.rest.BaseRestHandler#prepareRequest`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java#L158-L171), -and transport APIs all start at -[`org.elasticsearch.action.support.TransportAction#doExecute`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/TransportAction.java#L65)) -and the whole system fundamentally works in terms of an event loop (a `io.netty.channel.EventLoop`) which processes network events via -callbacks. 
- -`ActionListener` is not an _ad-hoc_ invention. Formally speaking, it is our implementation of the general concept of a continuation in the -sense of [_continuation-passing style_](https://en.wikipedia.org/wiki/Continuation-passing_style) (CPS): an extra argument to a function -which defines how to continue the computation when the result is available. This is in contrast to _direct style_ which is the more usual -style of calling methods that return values directly back to the caller so they can continue executing as normal. There's essentially two -ways that computation can continue in Java (it can return a value or it can throw an exception) which is why `ActionListener` has both an -`onResponse()` and an `onFailure()` method. - -CPS is strictly more expressive than direct style: direct code can be mechanically translated into continuation-passing style, but CPS also -enables all sorts of other useful control structures such as forking work onto separate threads, possibly to be executed in parallel, -perhaps even across multiple nodes, or possibly collecting a list of continuations all waiting for the same condition to be satisfied before -proceeding (e.g. -[`SubscribableListener`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/SubscribableListener.java) -amongst many others). Some languages have first-class support for continuations (e.g. the `async` and `await` primitives in C#) allowing the -programmer to write code in direct style away from those exotic control structures, but Java does not. That's why we have to manipulate all -the callbacks ourselves. - -Strictly speaking, CPS requires that a computation _only_ continues by calling the continuation. In Elasticsearch, this means that -asynchronous methods must have `void` return type and may not throw any exceptions. 
This is mostly the case in our code as written today, -and is a good guiding principle, but we don't enforce void exceptionless methods and there are some deviations from this rule. In -particular, it's not uncommon to permit some methods to throw an exception, using things like -[`ActionListener#run`](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/ActionListener.java#L381-L390) -(or an equivalent `try ... catch ...` block) further up the stack to handle it. Some methods also take (and may complete) an -`ActionListener` parameter, but still return a value separately for other local synchronous work. - -This pattern is often used in the transport action layer with the use of the -[ChannelActionListener](https://github.com/elastic/elasticsearch/blob/v8.12.2/server/src/main/java/org/elasticsearch/action/support/ChannelActionListener.java) -class, which wraps a `TransportChannel` produced by the transport layer. `TransportChannel` implementations can hold a reference to a Netty -channel with which to pass the response back to the network caller. Netty has a many-to-one association of network callers to channels, so a -call taking a long time generally won't hog resources: it's cheap. A transport action can take hours to respond and that's alright, barring -caller timeouts. +See the [Javadocs for `ActionListener`](https://github.com/elastic/elasticsearch/blob/main/server/src/main/java/org/elasticsearch/action/ActionListener.java) (TODO: add useful starter references and explanations for a range of Listener classes. Reference the Netty section.) 
diff --git a/server/src/main/java/org/elasticsearch/action/ActionListener.java b/server/src/main/java/org/elasticsearch/action/ActionListener.java index d07717857169..21f3df2ab717 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionListener.java +++ b/server/src/main/java/org/elasticsearch/action/ActionListener.java @@ -31,17 +31,94 @@ import static org.elasticsearch.action.ActionListenerImplementations.safeOnFailure; /** - * A listener for action responses or failures. + *

+ * Callbacks are used extensively throughout Elasticsearch because they enable us to write asynchronous and nonblocking code, i.e. code + * which doesn't necessarily compute a result straight away but also doesn't block the calling thread waiting for the result to become + * available. They support several useful control flows: + *

+ *
    + *
  • They can be completed immediately on the calling thread.
  • + *
  • They can be completed concurrently on a different thread.
  • + *
  • They can be stored in a data structure and completed later on when the system reaches a particular state.
  • + *
  • Most commonly, they can be passed on to other methods that themselves require a callback.
  • + *
  • They can be wrapped in another callback which modifies the behaviour of the original callback, perhaps adding some extra code to run + * before or after completion, before passing them on.
  • + *
+ *

+ * {@link ActionListener} is a general-purpose callback interface that is used extensively across the Elasticsearch codebase. {@link + * ActionListener} is used pretty much everywhere that needs to perform some asynchronous and nonblocking computation. The uniformity makes + * it easier to compose parts of the system together without needing to build adapters to convert back and forth between different kinds of + * callback. It also makes it easier to develop the skills needed to read and understand all the asynchronous code, although this definitely + * takes practice and is certainly not easy in an absolute sense. Finally, it has allowed us to build a rich library for working with {@link + * ActionListener} instances themselves, creating new instances out of existing ones and completing them in interesting ways. See for + * instance: + *

+ *
    + *
  • All the static methods on {@link ActionListener} itself.
  • + *
  • {@link org.elasticsearch.action.support.ThreadedActionListener} for forking work elsewhere.
  • + *
  • {@link org.elasticsearch.action.support.RefCountingListener} for running work in parallel.
  • + *
  • {@link org.elasticsearch.action.support.SubscribableListener} for constructing flexible workflows.
  • + *
+ *

+ * Callback-based asynchronous code can easily call regular synchronous code, but synchronous code cannot run callback-based asynchronous + * code without blocking the calling thread until the callback is called back. This blocking is at best undesirable (threads are too + * expensive to waste with unnecessary blocking) and at worst outright broken (the blocking can lead to deadlock). Unfortunately this means + * that most of our code ends up having to be written with callbacks, simply because it's ultimately calling into some other code that takes + * a callback. The entry points for all Elasticsearch APIs are callback-based (e.g. REST APIs all start at {@link + * org.elasticsearch.rest.BaseRestHandler}{@code #prepareRequest} and transport APIs all start at {@link + * org.elasticsearch.action.support.TransportAction}{@code #doExecute} and the whole system fundamentally works in terms of an event loop + * (an {@code io.netty.channel.EventLoop}) which processes network events via callbacks. + *

+ *

+ * {@link ActionListener} is not an ad-hoc invention. Formally speaking, it is our implementation of the general concept of a + * continuation in the sense of continuation-passing style + * (CPS): an extra argument to a function which defines how to continue the computation when the result is available. This is in contrast to + * direct style which is the more usual style of calling methods that return values directly back to the caller so they can continue + * executing as normal. There's essentially two ways that computation can continue in Java (it can return a value or it can throw an + * exception) which is why {@link ActionListener} has both an {@link #onResponse} and an {@link #onFailure} method. + *

+ *

+ * CPS is strictly more expressive than direct style: direct code can be mechanically translated into continuation-passing style, but CPS + * also enables all sorts of other useful control structures such as forking work onto separate threads, possibly to be executed in + * parallel, perhaps even across multiple nodes, or possibly collecting a list of continuations all waiting for the same condition to be + * satisfied before proceeding (e.g. {@link org.elasticsearch.action.support.SubscribableListener} amongst many others). Some languages have + * first-class support for continuations (e.g. the {@code async} and {@code await} primitives in C#) allowing the programmer to write code + * in direct style away from those exotic control structures, but Java does not. That's why we have to manipulate all the callbacks + * ourselves. + *

+ *

+ * Strictly speaking, CPS requires that a computation only continues by calling the continuation. In Elasticsearch, this means that + * asynchronous methods must have {@code void} return type and may not throw any exceptions. This is mostly the case in our code as written + * today, and is a good guiding principle, but we don't enforce void exceptionless methods and there are some deviations from this rule. In + * particular, it's not uncommon to permit some methods to throw an exception, using things like {@link ActionListener#run} (or an + * equivalent {@code try ... catch ...} block) further up the stack to handle it. Some methods also take (and may complete) an {@link + * ActionListener} parameter, but still return a value separately for other local synchronous work. + *

+ *

+ * This pattern is often used in the transport action layer with the use of the {@link + * org.elasticsearch.action.support.ChannelActionListener} class, which wraps a {@link org.elasticsearch.transport.TransportChannel} + * produced by the transport layer.{@link org.elasticsearch.transport.TransportChannel} implementations can hold a reference to a Netty + * channel with which to pass the response back to the network caller. Netty has a many-to-one association of network callers to channels, + * so a call taking a long time generally won't hog resources: it's cheap. A transport action can take hours to respond and that's alright, + * barring caller timeouts. + *

+ *

+ * Note that we explicitly avoid {@link java.util.concurrent.CompletableFuture} and other similar mechanisms as much as possible. They + * can achieve the same goals as {@link ActionListener}, but can also easily be misused in various ways that lead to severe bugs. In + * particular, futures support blocking while waiting for a result, but this is almost never appropriate in Elasticsearch's production code + * where threads are such a precious resource. Moreover if something throws an {@link Error} then the JVM should exit pretty much straight + * away, but {@link java.util.concurrent.CompletableFuture} can catch an {@link Error} which delays the JVM exit until its result is + * observed. This may be much later, or possibly even never. It's not possible to introduce such bugs when using {@link ActionListener}. + *

*/ public interface ActionListener { /** - * Handle action response. This response may constitute a failure or a - * success but it is up to the listener to make that decision. + * Complete this listener with a successful (or at least, non-exceptional) response. */ void onResponse(Response response); /** - * A failure caused by an exception at some phase of the task. + * Complete this listener with an exceptional response. */ void onFailure(Exception e); From f5b356d11c3a6760a9487ef38587db0f5a8d06b7 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Thu, 9 May 2024 10:15:56 +0100 Subject: [PATCH 021/119] Fix race in SpawnerNoBootstrapTests (#108416) --- .../bootstrap/SpawnerNoBootstrapTests.java | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index c4aa3c9b1f1e..08e3ac2cbce8 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -39,8 +39,10 @@ import java.util.concurrent.TimeUnit; import java.util.function.Function; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; /** * Create a simple "daemon controller", put it in the right place and check that it runs. 
@@ -70,13 +72,13 @@ public class SpawnerNoBootstrapTests extends LuceneTestCase { static class ExpectedStreamMessage implements MockLogAppender.LoggingExpectation { final String expectedLogger; final String expectedMessage; - final CountDownLatch matchCalledLatch; - boolean saw; + final CountDownLatch matched; + volatile boolean saw; - ExpectedStreamMessage(String logger, String message, CountDownLatch matchCalledLatch) { + ExpectedStreamMessage(String logger, String message, CountDownLatch matched) { this.expectedLogger = logger; this.expectedMessage = message; - this.matchCalledLatch = matchCalledLatch; + this.matched = matched; } @Override @@ -85,8 +87,8 @@ public void match(LogEvent event) { && event.getLevel().equals(Level.WARN) && event.getMessage().getFormattedMessage().equals(expectedMessage)) { saw = true; + matched.countDown(); } - matchCalledLatch.countDown(); } @Override @@ -130,7 +132,7 @@ public void testNoControllerSpawn() throws IOException { try (Spawner spawner = new Spawner()) { spawner.spawnNativeControllers(environment); - assertThat(spawner.getProcesses(), hasSize(0)); + assertThat(spawner.getProcesses(), is(empty())); } } @@ -229,7 +231,7 @@ private void assertControllerSpawns(final Function pluginsDir // fail if the process does not die within one second; usually it will be even quicker but it depends on OS scheduling assertTrue(process.waitFor(1, TimeUnit.SECONDS)); } else { - assertThat(processes, hasSize(0)); + assertThat(processes, is(empty())); } appender.assertAllExpectationsMatched(); } From 06a07587699f9c39b4c793e45f3576c44fa81885 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Istv=C3=A1n=20Zolt=C3=A1n=20Szab=C3=B3?= Date: Thu, 9 May 2024 15:09:11 +0200 Subject: [PATCH 022/119] [DOCS] Fixes typo in Cohere ES tutorial (#108456) * [DOCS] Fixes typo in Cohere ES tutorial. * [DOCS] Fixes list. 
--- docs/reference/search/search-your-data/cohere-es.asciidoc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/reference/search/search-your-data/cohere-es.asciidoc b/docs/reference/search/search-your-data/cohere-es.asciidoc index 751cfebca8c7..f12f23ad2c5d 100644 --- a/docs/reference/search/search-your-data/cohere-es.asciidoc +++ b/docs/reference/search/search-your-data/cohere-es.asciidoc @@ -38,7 +38,7 @@ account, [discrete] [[cohere-es-packages]] -==== Istall required packages +==== Install required packages Install {es} and Cohere: @@ -63,6 +63,7 @@ import requests ==== Create the {es} client To create your {es} client, you need: + * https://www.elastic.co/search-labs/tutorials/install-elasticsearch/elastic-cloud#finding-your-cloud-id[your Cloud ID], * https://www.elastic.co/search-labs/tutorials/install-elasticsearch/elastic-cloud#creating-an-api-key[an encoded API key]. From 5a612d4100133b28730dfacb1eaa94a749183ce1 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 9 May 2024 09:32:46 -0400 Subject: [PATCH 023/119] ESQL: Remove remaining IT_tests_only (#108434) This moves examples from files marked to run in integration tests only to the files where they belong and disables this pattern matching. We now use supported features. --- .../esql/processing-commands/enrich.asciidoc | 16 ++--- .../resources/docs-IT_tests_only.csv-spec | 67 ------------------- .../src/main/resources/enrich.csv-spec | 54 ++++++++++++++- .../elasticsearch/xpack/esql/CsvTests.java | 9 ++- 4 files changed, 64 insertions(+), 82 deletions(-) delete mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec diff --git a/docs/reference/esql/processing-commands/enrich.asciidoc b/docs/reference/esql/processing-commands/enrich.asciidoc index f73eea6018cb..5470d81b2f40 100644 --- a/docs/reference/esql/processing-commands/enrich.asciidoc +++ b/docs/reference/esql/processing-commands/enrich.asciidoc @@ -57,11 +57,11 @@ in this example). 
`ENRICH` will look for records in the [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich] +include::{esql-specs}/enrich.csv-spec[tag=enrich] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich-result] |=== To use a column with a different name than the `match_field` defined in the @@ -69,11 +69,11 @@ policy as the match field, use `ON `: [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_on] +include::{esql-specs}/enrich.csv-spec[tag=enrich_on] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_on-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich_on-result] |=== By default, each of the enrich fields defined in the policy is added as a @@ -82,22 +82,22 @@ column. To explicitly select the enrich fields that are added, use [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_with] +include::{esql-specs}/enrich.csv-spec[tag=enrich_with] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_with-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich_with-result] |=== You can rename the columns that are added using `WITH new_name=`: [source.merge.styled,esql] ---- -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_rename] +include::{esql-specs}/enrich.csv-spec[tag=enrich_rename] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_rename-result] +include::{esql-specs}/enrich.csv-spec[tag=enrich_rename-result] |=== In case of name collisions, the newly created columns will override existing diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec deleted file mode 100644 index f4bf2333cae8..000000000000 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec +++ /dev/null @@ -1,67 +0,0 @@ -// This file contains any ESQL snippets from the docs that don't have a home -// anywhere else. The Isle of Misfit Toys. When you need to add new examples -// for the docs you should try to convert an existing test first. Just add -// the comments in whatever file the test already lives in. If you have to -// write a new test to make an example in the docs then put it in whatever -// file matches it's "theme" best. Put it next to similar tests. Not here. - -// Also! When Nik originally extracted examples from the docs to make them -// testable he didn't spend a lot of time putting the docs into appropriate -// files. He just made this one. He didn't put his toys away. We'd be better -// off not adding to this strange toy-pile and instead moving things into -// the appropriate files. 
- -enrich -// tag::enrich[] -ROW language_code = "1" -| ENRICH languages_policy -// end::enrich[] -; - -// tag::enrich-result[] -language_code:keyword | language_name:keyword -1 | English -// end::enrich-result[] -; - - -enrichOn -// tag::enrich_on[] -ROW a = "1" -| ENRICH languages_policy ON a -// end::enrich_on[] -; - -// tag::enrich_on-result[] -a:keyword | language_name:keyword -1 | English -// end::enrich_on-result[] -; - - -enrichWith -// tag::enrich_with[] -ROW a = "1" -| ENRICH languages_policy ON a WITH language_name -// end::enrich_with[] -; - -// tag::enrich_with-result[] -a:keyword | language_name:keyword -1 | English -// end::enrich_with-result[] -; - - -enrichRename -// tag::enrich_rename[] -ROW a = "1" -| ENRICH languages_policy ON a WITH name = language_name -// end::enrich_rename[] -; - -// tag::enrich_rename-result[] -a:keyword | name:keyword -1 | English -// end::enrich_rename-result[] -; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index e84e79748c17..f044989ec9cc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -34,12 +34,31 @@ median_duration:double | env:keyword simple required_feature: esql.enrich_load -row language_code = "1" -| enrich languages_policy +// tag::enrich[] +ROW language_code = "1" +| ENRICH languages_policy +// end::enrich[] ; +// tag::enrich-result[] language_code:keyword | language_name:keyword 1 | English +// end::enrich-result[] +; + +enrichOnSimple +required_feature: esql.enrich_load + +// tag::enrich_on[] +ROW a = "1" +| ENRICH languages_policy ON a +// end::enrich_on[] +; + +// tag::enrich_on-result[] +a:keyword | language_name:keyword +1 | English +// end::enrich_on-result[] ; @@ -83,6 +102,22 @@ emp_no:integer | x:keyword | language_name:keyword ; +withSimple +required_feature: esql.enrich_load + 
+// tag::enrich_with[] +ROW a = "1" +| ENRICH languages_policy ON a WITH language_name +// end::enrich_with[] +; + +// tag::enrich_with-result[] +a:keyword | language_name:keyword +1 | English +// end::enrich_with-result[] +; + + withAlias required_feature: esql.enrich_load @@ -95,6 +130,21 @@ emp_no:integer | x:keyword | lang:keyword 10003 | 4 | German ; +withAliasSimple +required_feature: esql.enrich_load + +// tag::enrich_rename[] +ROW a = "1" +| ENRICH languages_policy ON a WITH name = language_name +// end::enrich_rename[] +; + +// tag::enrich_rename-result[] +a:keyword | name:keyword +1 | English +// end::enrich_rename-result[] +; + withAliasSort required_feature: esql.enrich_load diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 3539138e670e..86f595810a49 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -110,6 +110,8 @@ import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser; import static org.elasticsearch.xpack.ql.TestUtils.classpathResources; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -144,7 +146,6 @@ public class CsvTests extends ESTestCase { private static final Logger LOGGER = LogManager.getLogger(CsvTests.class); - private static final String IGNORED_CSV_FILE_NAMES_PATTERN = "-IT_tests_only"; private final String fileName; private final String groupName; @@ -164,10 +165,8 @@ public class CsvTests extends ESTestCase { @ParametersFactory(argumentFormatting = "%2$s.%3$s") public static List readScriptSpec() throws Exception { - List urls = classpathResources("/*.csv-spec").stream() - .filter(x -> 
x.toString().contains(IGNORED_CSV_FILE_NAMES_PATTERN) == false) - .toList(); - assertTrue("Not enough specs found " + urls, urls.size() > 0); + List urls = classpathResources("/*.csv-spec"); + assertThat("Not enough specs found " + urls, urls, hasSize(greaterThan(0))); return SpecReader.readScriptSpec(urls, specParser()); } From e178684d32a32b7222547cd09bc6c21ce58235c2 Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Thu, 9 May 2024 10:33:14 -0400 Subject: [PATCH 024/119] Correct typo in documentation (#108462) Correct a small typo: one closing ">" was missing. --- .../tab-widgets/semantic-search/hybrid-search.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc b/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc index 47403df450bd..93edc0918614 100644 --- a/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc +++ b/docs/reference/tab-widgets/semantic-search/hybrid-search.asciidoc @@ -1,7 +1,7 @@ // tag::elser[] Hybrid search between a semantic and lexical query can be achieved by using an -< as part of your search request. Provide a +<> as part of your search request. Provide a `text_expansion` query and a full-text query as <> for the `rrf` retriever. The `rrf` retriever uses <> to rank the top documents. 
From 4dcbc3bd76eeb9f25334d9fadf7c6e8d8bb3d6eb Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Thu, 9 May 2024 17:34:06 +0300 Subject: [PATCH 025/119] Version-guard checking for lossy params in _source (#108460) --- .../elasticsearch/index/IndexVersions.java | 1 + .../index/mapper/SourceFieldMapper.java | 13 ++++-- .../mapper/DynamicFieldsBuilderTests.java | 2 +- .../index/mapper/SourceFieldMapperTests.java | 42 +++++++++++++++++++ .../query/SearchExecutionContextTests.java | 2 +- 5 files changed, 54 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 78f07c8a137b..f076ee0be554 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -105,6 +105,7 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion TIME_SERIES_ROUTING_HASH_IN_ID = def(8_504_00_0, Version.LUCENE_9_10_0); public static final IndexVersion DEFAULT_DENSE_VECTOR_TO_INT8_HNSW = def(8_505_00_0, Version.LUCENE_9_10_0); public static final IndexVersion DOC_VALUES_FOR_IGNORED_META_FIELD = def(8_505_00_1, Version.LUCENE_9_10_0); + public static final IndexVersion SOURCE_MAPPER_LOSSY_PARAMS_CHECK = def(8_506_00_0, Version.LUCENE_9_10_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index 233faf462400..1b6d6dd1141f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -134,10 +134,11 @@ public static class Builder extends MetadataFieldMapper.Builder { private final boolean supportsNonDefaultParameterValues; - public Builder(IndexMode indexMode, final Settings settings) { + public Builder(IndexMode indexMode, final Settings settings, boolean supportsCheckForNonDefaultParams) { super(Defaults.NAME); this.indexMode = indexMode; - this.supportsNonDefaultParameterValues = settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); + this.supportsNonDefaultParameterValues = supportsCheckForNonDefaultParams == false + || settings.getAsBoolean(LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, true); } public Builder setSynthetic() { @@ -212,7 +213,11 @@ public SourceFieldMapper build() { c -> c.getIndexSettings().getMode() == IndexMode.TIME_SERIES ? c.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.V_8_7_0) ? 
TSDB_DEFAULT : TSDB_LEGACY_DEFAULT : DEFAULT, - c -> new Builder(c.getIndexSettings().getMode(), c.getSettings()) + c -> new Builder( + c.getIndexSettings().getMode(), + c.getSettings(), + c.indexVersionCreated().onOrAfter(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK) + ) ); static final class SourceFieldType extends MappedFieldType { @@ -347,7 +352,7 @@ protected String contentType() { @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(indexMode, Settings.EMPTY).init(this); + return new Builder(indexMode, Settings.EMPTY, false).init(this); } /** diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java index 229e2e6f72cc..bab046d41b6e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicFieldsBuilderTests.java @@ -68,7 +68,7 @@ public void testCreateDynamicStringFieldAsKeywordForDimension() throws IOExcepti XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new PassThroughObjectMapper.Builder("labels").setContainsDimensions().dynamic(ObjectMapper.Dynamic.TRUE) ).build(MapperBuilderContext.root(false, false)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index a5264512d808..802a18645eab 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -13,6 +13,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; @@ -298,4 +300,44 @@ public void testSupportsNonDefaultParameterValues() throws IOException { ); assertThat(e.getMessage(), containsString("Parameters [enabled,includes,excludes] are not allowed in source")); } + + public void testBypassCheckForNonDefaultParameterValuesInEarlierVersions() throws IOException { + Settings settings = Settings.builder().put(SourceFieldMapper.LOSSY_PARAMETERS_ALLOWED_SETTING_NAME, false).build(); + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").field("enabled", false).endObject()) + ).documentMapper().sourceMapper(); + assertThat(sourceFieldMapper, notNullValue()); + } + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").array("includes", "foo").endObject()) + ).documentMapper().sourceMapper(); + assertThat(sourceFieldMapper, notNullValue()); + } + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").array("excludes", "foo").endObject()) + ).documentMapper().sourceMapper(); + 
assertThat(sourceFieldMapper, notNullValue()); + } + { + var sourceFieldMapper = createMapperService( + IndexVersionUtils.getPreviousVersion(IndexVersions.SOURCE_MAPPER_LOSSY_PARAMS_CHECK), + settings, + () -> true, + topMapping(b -> b.startObject("_source").field("mode", "disabled").endObject()) + ).documentMapper().sourceMapper(); + assertThat(sourceFieldMapper, notNullValue()); + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java index 3085ff89603c..e541c680ada1 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java @@ -382,7 +382,7 @@ public void testSearchRequestRuntimeFieldsAndMultifieldDetection() { public void testSyntheticSourceSearchLookup() throws IOException { // Build a mapping using synthetic source - SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY).setSynthetic().build(); + SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false).setSynthetic().build(); RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).add( new KeywordFieldMapper.Builder("cat", IndexVersion.current()).ignoreAbove(100) ).build(MapperBuilderContext.root(true, false)); From 0a8c6d28c108c0979a10d572a9a2bf3cc882696d Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 9 May 2024 07:52:04 -0700 Subject: [PATCH 026/119] Log running tasks in EsqlDisruptionIT (#108440) This PR logs tasks that are running after the disruption is cleared, allowing us to investigate why the disruption tests failed in #107347. 
Relates #107347 --- .../xpack/esql/action/EsqlActionIT.java | 1 - .../xpack/esql/action/EsqlDisruptionIT.java | 17 +++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 1bc9bd4766c2..686fb831aa04 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -121,7 +121,6 @@ public void testRow() { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107347") public void testFromStatsGroupingAvgWithSort() { testFromStatsGroupingAvgImpl("from test | stats avg(count) by data | sort data | limit 2", "data", "avg(count)"); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java index 4bbcff44ec74..e005e2143522 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlDisruptionIT.java @@ -8,12 +8,14 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.cluster.coordination.Coordinator; import org.elasticsearch.cluster.coordination.FollowersChecker; import org.elasticsearch.cluster.coordination.LeaderChecker; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; +import 
org.elasticsearch.health.node.selection.HealthNode; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.disruption.NetworkDisruption; @@ -91,6 +93,21 @@ private EsqlQueryResponse runQueryWithDisruption(EsqlQueryRequest request) { try { return future.actionGet(2, TimeUnit.MINUTES); } catch (Exception e) { + logger.info( + "running tasks: {}", + client().admin() + .cluster() + .prepareListTasks() + .get() + .getTasks() + .stream() + .filter( + // Skip the tasks we that'd get in the way while debugging + t -> false == t.action().contains(TransportListTasksAction.TYPE.name()) + && false == t.action().contains(HealthNode.TASK_NAME) + ) + .toList() + ); assertTrue("request must be failed or completed after clearing disruption", future.isDone()); ensureBlocksReleased(); logger.info("--> failed to execute esql query with disruption; retrying...", e); From 1b7cad185ee015a93d62c419d3d29a79ef3ae567 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Thu, 9 May 2024 17:57:53 +0300 Subject: [PATCH 027/119] ESQL: Add aggregates node level reduction (#107876) * Add aggregation intermediate reduction level and estimatedRowSize computed value --- docs/changelog/107876.yaml | 5 +++ .../xpack/esql/action/EsqlActionTaskIT.java | 40 +++++++++++++++++-- .../esql/plan/physical/AggregateExec.java | 4 ++ .../AbstractPhysicalOperationProviders.java | 37 ++++++++++------- .../xpack/esql/planner/Mapper.java | 2 +- .../xpack/esql/planner/PlannerUtils.java | 23 +++++------ 6 files changed, 79 insertions(+), 32 deletions(-) create mode 100644 docs/changelog/107876.yaml diff --git a/docs/changelog/107876.yaml b/docs/changelog/107876.yaml new file mode 100644 index 000000000000..21624cacf7e1 --- /dev/null +++ b/docs/changelog/107876.yaml @@ -0,0 +1,5 @@ +pr: 107876 +summary: "ESQL: Add aggregates node level reduction" +area: ES|QL +type: enhancement +issues: [] diff --git 
a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index a1cd71da6c63..d18bf0e23fd2 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -81,6 +81,7 @@ public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { @Before public void setup() { assumeTrue("requires query pragmas", canUseQueryPragmas()); + nodeLevelReduction = randomBoolean(); READ_DESCRIPTION = """ \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647] \\_ValuesSourceReaderOperator[fields = [pause_me]] @@ -92,10 +93,10 @@ public void setup() { \\_ProjectOperator[projection = [0]] \\_LimitOperator[limit = 1000] \\_OutputOperator[columns = [sum(pause_me)]]"""; - REDUCE_DESCRIPTION = """ - \\_ExchangeSourceOperator[] - \\_ExchangeSinkOperator"""; - nodeLevelReduction = randomBoolean(); + REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" + + (nodeLevelReduction ? 
"\\_AggregationOperator[mode = INTERMEDIATE, aggs = sum of longs]\n" : "") + + "\\_ExchangeSinkOperator"; + } public void testTaskContents() throws Exception { @@ -480,6 +481,37 @@ public void testTaskContentsForLimitQuery() throws Exception { } } + public void testTaskContentsForGroupingStatsQuery() throws Exception { + READ_DESCRIPTION = """ + \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647] + \\_ValuesSourceReaderOperator[fields = [foo]] + \\_OrdinalsGroupingOperator(aggs = max of longs) + \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())); + MERGE_DESCRIPTION = """ + \\_ExchangeSourceOperator[] + \\_HashAggregationOperator[mode = , aggs = max of longs] + \\_ProjectOperator[projection = [1, 0]] + \\_LimitOperator[limit = 1000] + \\_OutputOperator[columns = [max(foo), pause_me]]"""; + REDUCE_DESCRIPTION = "\\_ExchangeSourceOperator[]\n" + + (nodeLevelReduction ? "\\_HashAggregationOperator[mode = , aggs = max of longs]\n" : "") + + "\\_ExchangeSinkOperator"; + + ActionFuture response = startEsql("from test | stats max(foo) by pause_me"); + try { + getTasksStarting(); + scriptPermits.release(pageSize()); + getTasksRunning(); + } finally { + scriptPermits.release(numberOfDocs()); + try (EsqlQueryResponse esqlResponse = response.get()) { + var it = Iterators.flatMap(esqlResponse.values(), i -> i); + assertThat(it.next(), equalTo(numberOfDocs() - 1L)); // max of numberOfDocs() generated int values + assertThat(it.next(), equalTo(1L)); // pause_me always emits 1 + } + } + } + @Override protected Collection> nodePlugins() { return CollectionUtils.appendToCopy(super.nodePlugins(), MockTransportService.TestPlugin.class); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java index 9feb5e9b009d..490ec174eea5 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/AggregateExec.java @@ -68,6 +68,10 @@ public List aggregates() { return aggregates; } + public AggregateExec withMode(Mode newMode) { + return new AggregateExec(source(), child(), groupings, aggregates, newMode, estimatedRowSize); + } + /** * Estimate of the number of bytes that'll be loaded per position before * the stream of pages is consumed. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java index 8c5392ccc178..f5e4dead6734 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AbstractPhysicalOperationProviders.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeSourceExec; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.LocalExecutionPlannerContext; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner.PhysicalOperation; import org.elasticsearch.xpack.ql.InvalidArgumentException; @@ -54,6 +55,20 @@ public final PhysicalOperation groupingPhysicalOperation( var aggregates = aggregateExec.aggregates(); var sourceLayout = source.layout; + AggregatorMode aggregatorMode; + + if (mode == AggregateExec.Mode.FINAL) { + aggregatorMode = AggregatorMode.FINAL; + } else if (mode == AggregateExec.Mode.PARTIAL) { + if (aggregateExec.child() instanceof ExchangeSourceExec) {// the reducer step at data 
node (local) level + aggregatorMode = AggregatorMode.INTERMEDIATE; + } else { + aggregatorMode = AggregatorMode.INITIAL; + } + } else { + assert false : "Invalid aggregator mode [" + mode + "]"; + aggregatorMode = AggregatorMode.SINGLE; + } if (aggregateExec.groupings().isEmpty()) { // not grouping @@ -65,20 +80,18 @@ public final PhysicalOperation groupingPhysicalOperation( } else { layout.append(aggregateMapper.mapNonGrouping(aggregates)); } + // create the agg factories aggregatesToFactory( aggregates, - mode, + aggregatorMode, sourceLayout, false, // non-grouping s -> aggregatorFactories.add(s.supplier.aggregatorFactory(s.mode)) ); if (aggregatorFactories.isEmpty() == false) { - operatorFactory = new AggregationOperator.AggregationOperatorFactory( - aggregatorFactories, - mode == AggregateExec.Mode.FINAL ? AggregatorMode.FINAL : AggregatorMode.INITIAL - ); + operatorFactory = new AggregationOperator.AggregationOperatorFactory(aggregatorFactories, aggregatorMode); } } else { // grouping @@ -136,7 +149,7 @@ else if (mode == AggregateExec.Mode.PARTIAL) { // create the agg factories aggregatesToFactory( aggregates, - mode, + aggregatorMode, sourceLayout, true, // grouping s -> aggregatorFactories.add(s.supplier.groupingAggregatorFactory(s.mode)) @@ -219,7 +232,7 @@ private record AggFunctionSupplierContext(AggregatorFunctionSupplier supplier, A private void aggregatesToFactory( List aggregates, - AggregateExec.Mode mode, + AggregatorMode mode, Layout layout, boolean grouping, Consumer consumer @@ -228,11 +241,9 @@ private void aggregatesToFactory( if (ne instanceof Alias alias) { var child = alias.child(); if (child instanceof AggregateFunction aggregateFunction) { - AggregatorMode aggMode = null; List sourceAttr; - if (mode == AggregateExec.Mode.PARTIAL) { - aggMode = AggregatorMode.INITIAL; + if (mode == AggregatorMode.INITIAL) { // TODO: this needs to be made more reliable - use casting to blow up when dealing with expressions (e+1) Expression field = 
aggregateFunction.field(); // Only count can now support literals - all the other aggs should be optimized away @@ -257,9 +268,7 @@ private void aggregatesToFactory( } sourceAttr = List.of(attr); } - - } else if (mode == AggregateExec.Mode.FINAL) { - aggMode = AggregatorMode.FINAL; + } else if (mode == AggregatorMode.FINAL || mode == AggregatorMode.INTERMEDIATE) { if (grouping) { sourceAttr = aggregateMapper.mapGrouping(aggregateFunction); } else { @@ -279,7 +288,7 @@ private void aggregatesToFactory( assert inputChannels.size() > 0 && inputChannels.stream().allMatch(i -> i >= 0); } if (aggregateFunction instanceof ToAggregator agg) { - consumer.accept(new AggFunctionSupplierContext(agg.supplier(inputChannels), aggMode)); + consumer.accept(new AggFunctionSupplierContext(agg.supplier(inputChannels), mode)); } else { throw new EsqlIllegalArgumentException("aggregate functions must extend ToAggregator"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java index 62b305a68bc2..1212e77557ca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java @@ -54,7 +54,7 @@ public class Mapper { private final FunctionRegistry functionRegistry; - private final boolean localMode; + private final boolean localMode; // non-coordinator (data node) mode public Mapper(FunctionRegistry functionRegistry) { this.functionRegistry = functionRegistry; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index 26c57f13e16c..fbfc57261bc4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -24,6 +24,7 @@ import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EsSourceExec; import org.elasticsearch.xpack.esql.plan.physical.EstimatesRowSize; @@ -87,23 +88,19 @@ public static PhysicalPlan dataNodeReductionPlan(LogicalPlan plan, PhysicalPlan if (pipelineBreakers.isEmpty() == false) { UnaryPlan pipelineBreaker = (UnaryPlan) pipelineBreakers.get(0); - if (pipelineBreaker instanceof TopN topN) { - return new TopNExec(topN.source(), unused, topN.order(), topN.limit(), 2000); + if (pipelineBreaker instanceof TopN) { + Mapper mapper = new Mapper(true); + var physicalPlan = EstimatesRowSize.estimateRowSize(0, mapper.map(plan)); + return physicalPlan.collectFirstChildren(TopNExec.class::isInstance).get(0); } else if (pipelineBreaker instanceof Limit limit) { return new LimitExec(limit.source(), unused, limit.limit()); } else if (pipelineBreaker instanceof OrderBy order) { return new OrderExec(order.source(), unused, order.order()); - } else if (pipelineBreaker instanceof Aggregate aggregate) { - // TODO handle this as a special PARTIAL step (intermediate) - /*return new AggregateExec( - aggregate.source(), - unused, - aggregate.groupings(), - aggregate.aggregates(), - AggregateExec.Mode.PARTIAL, - 0 - );*/ - return null; + } else if (pipelineBreaker instanceof Aggregate) { + Mapper mapper = new Mapper(true); + var physicalPlan = EstimatesRowSize.estimateRowSize(0, mapper.map(plan)); + var aggregate = (AggregateExec) physicalPlan.collectFirstChildren(AggregateExec.class::isInstance).get(0); + return aggregate.withMode(AggregateExec.Mode.PARTIAL); } else { throw new 
EsqlIllegalArgumentException("unsupported unary physical plan node [" + pipelineBreaker.nodeName() + "]"); } From 6308bbf032a2dbf7f6671634bd520c5310014f66 Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Thu, 9 May 2024 11:41:38 -0400 Subject: [PATCH 028/119] Add an APM metric to aggregations usage (#108110) This wires up the "new" APM metrics integration to the existing Aggregations usage tracking system. It introduces one new metric, a LongCounter named es.search.query.aggregations.total, which has dimensions for the specific aggregation being run, and the values source type we resolved it to. --------- Co-authored-by: Elastic Machine --- .../elasticsearch/node/NodeConstruction.java | 6 ++--- .../elasticsearch/search/SearchModule.java | 20 ++++++++++++++++- .../support/AggregationUsageService.java | 22 +++++++++++++++++++ .../support/ValuesSourceRegistry.java | 7 +++++- 4 files changed, 50 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 14e8ce80fcf2..9585711b5562 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -257,7 +257,7 @@ static NodeConstruction prepareConstruction( ThreadPool threadPool = constructor.createThreadPool(settings, telemetryProvider.getMeterRegistry()); SettingsModule settingsModule = constructor.validateSettings(initialEnvironment.settings(), settings, threadPool); - SearchModule searchModule = constructor.createSearchModule(settingsModule.getSettings(), threadPool); + SearchModule searchModule = constructor.createSearchModule(settingsModule.getSettings(), threadPool, telemetryProvider); constructor.createClientAndRegistries(settingsModule.getSettings(), threadPool, searchModule); DocumentParsingProvider documentParsingProvider = constructor.getDocumentParsingProvider(); @@ -525,9 +525,9 @@ private 
SettingsModule validateSettings(Settings envSettings, Settings settings, return settingsModule; } - private SearchModule createSearchModule(Settings settings, ThreadPool threadPool) { + private SearchModule createSearchModule(Settings settings, ThreadPool threadPool, TelemetryProvider telemetryProvider) { IndexSearcher.setMaxClauseCount(SearchUtils.calculateMaxClauseValue(threadPool)); - return new SearchModule(settings, pluginsService.filterPlugins(SearchPlugin.class).toList()); + return new SearchModule(settings, pluginsService.filterPlugins(SearchPlugin.class).toList(), telemetryProvider); } /** diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index 97b747c650c1..8d5fa0a7ac15 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -254,6 +254,7 @@ import org.elasticsearch.search.vectors.KnnScoreDocQueryBuilder; import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import org.elasticsearch.search.vectors.QueryVectorBuilder; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentParser; @@ -289,6 +290,11 @@ public class SearchModule { Setting.Property.NodeScope ); + /** + * Metric name for aggregation usage statistics + */ + private final TelemetryProvider telemetryProvider; + private final Map highlighters; private final List fetchSubPhases = new ArrayList<>(); @@ -306,7 +312,19 @@ public class SearchModule { * @param plugins List of included {@link SearchPlugin} objects. 
*/ public SearchModule(Settings settings, List plugins) { + this(settings, plugins, TelemetryProvider.NOOP); + } + + /** + * Constructs a new SearchModule object + * + * @param settings Current settings + * @param plugins List of included {@link SearchPlugin} objects. + * @param telemetryProvider + */ + public SearchModule(Settings settings, List plugins, TelemetryProvider telemetryProvider) { this.settings = settings; + this.telemetryProvider = telemetryProvider; registerSuggesters(plugins); highlighters = setupHighlighters(settings, plugins); registerScoreFunctions(plugins); @@ -352,7 +370,7 @@ public Map getHighlighters() { } private ValuesSourceRegistry registerAggregations(List plugins) { - ValuesSourceRegistry.Builder builder = new ValuesSourceRegistry.Builder(); + ValuesSourceRegistry.Builder builder = new ValuesSourceRegistry.Builder(telemetryProvider.getMeterRegistry()); registerAggregation( new AggregationSpec(AvgAggregationBuilder.NAME, AvgAggregationBuilder::new, AvgAggregationBuilder.PARSER).addResultReader( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java index 853aa152db03..28ef6f934d28 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/AggregationUsageService.java @@ -9,12 +9,18 @@ package org.elasticsearch.search.aggregations.support; import org.elasticsearch.node.ReportingService; +import org.elasticsearch.telemetry.metric.LongCounter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.LongAdder; public class AggregationUsageService implements ReportingService { + private static final String ES_SEARCH_QUERY_AGGREGATIONS_TOTAL_COUNT = "es.search.query.aggregations.total"; + 
private final String AGGREGATION_NAME_KEY = "aggregation_name"; + private final String VALUES_SOURCE_KEY = "values_source"; + private final LongCounter aggregationsUsageCounter; private final Map> aggs; private final AggregationInfo info; @@ -22,9 +28,16 @@ public class AggregationUsageService implements ReportingService> aggs; + private final MeterRegistry meterRegistry; public Builder() { + this(MeterRegistry.NOOP); + } + + public Builder(MeterRegistry meterRegistry) { aggs = new HashMap<>(); + assert meterRegistry != null; + this.meterRegistry = meterRegistry; } public void registerAggregationUsage(String aggregationName) { @@ -45,9 +58,16 @@ public AggregationUsageService build() { } } + // Attribute names for the metric + private AggregationUsageService(Builder builder) { this.aggs = builder.aggs; info = new AggregationInfo(aggs); + this.aggregationsUsageCounter = builder.meterRegistry.registerLongCounter( + ES_SEARCH_QUERY_AGGREGATIONS_TOTAL_COUNT, + "Aggregations usage", + "count" + ); } public void incAggregationUsage(String aggregationName, String valuesSourceType) { @@ -61,6 +81,8 @@ public void incAggregationUsage(String aggregationName, String valuesSourceType) assert adder != null : "Unknown subtype [" + aggregationName + "][" + valuesSourceType + "]"; } assert valuesSourceMap != null : "Unknown aggregation [" + aggregationName + "][" + valuesSourceType + "]"; + // tests will have a no-op implementation here + aggregationsUsageCounter.incrementBy(1, Map.of(AGGREGATION_NAME_KEY, aggregationName, VALUES_SOURCE_KEY, valuesSourceType)); } public Map getUsageStats() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java index 44e66d98f025..fcfcad96d9fb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java @@ -10,6 +10,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.telemetry.metric.MeterRegistry; import java.util.AbstractMap; import java.util.ArrayList; @@ -58,7 +59,11 @@ public static class Builder { private final Map, List>> aggregatorRegistry = new HashMap<>(); public Builder() { - this.usageServiceBuilder = new AggregationUsageService.Builder(); + this(MeterRegistry.NOOP); + } + + public Builder(MeterRegistry meterRegistry) { + this.usageServiceBuilder = new AggregationUsageService.Builder(meterRegistry); } /** From 9f438edb43cbc95f4156ce4bc8b4251a2f75da9d Mon Sep 17 00:00:00 2001 From: Mark Tozzi Date: Thu, 9 May 2024 11:44:24 -0400 Subject: [PATCH 029/119] Fix ClassCastException in Significant Terms (#108429) Prior to this PR, if a SignificantTerms aggregation targeted a field existing on two indices (that were included in the aggregation) but mapped to different field types, the query would fail at reduce time with a somewhat obscure ClassCastException. This change brings the behavior in line with the Terms aggregation, which returns a 400 class IllegalArgumentException with a useful message in this situation. 
Resolves #108427 --- docs/changelog/108429.yaml | 6 ++++++ .../terms/InternalSignificantTerms.java | 19 +++++++++++++++++++ 2 files changed, 25 insertions(+) create mode 100644 docs/changelog/108429.yaml diff --git a/docs/changelog/108429.yaml b/docs/changelog/108429.yaml new file mode 100644 index 000000000000..562454a0de25 --- /dev/null +++ b/docs/changelog/108429.yaml @@ -0,0 +1,6 @@ +pr: 108429 +summary: Fix `ClassCastException` in Significant Terms +area: Aggregations +type: bug +issues: + - 108427 diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index f8e7f3cf3a69..91bb4c3f0cd7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.util.ObjectObjectPagedHashMap; import org.elasticsearch.core.Releasables; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.AggregationErrors; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorReducer; @@ -29,6 +30,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; /** * Result of the significant terms aggregation. @@ -208,10 +210,27 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont reduceContext.bigArrays() ); + private InternalAggregation referenceAgg = null; + @Override public void accept(InternalAggregation aggregation) { + /* + canLeadReduction here is essentially checking if this shard returned data. Unmapped shards (that didn't + specify a missing value) will be false. 
Since they didn't return data, we can safely skip them, and + doing so prevents us from accidentally taking one as the reference agg for type checking, which would cause + shards that actually returned data to fail. + */ + if (aggregation.canLeadReduction() == false) { + return; + } @SuppressWarnings("unchecked") final InternalSignificantTerms terms = (InternalSignificantTerms) aggregation; + if (referenceAgg == null) { + referenceAgg = terms; + } else if (referenceAgg.getClass().equals(terms.getClass()) == false) { + // We got here because shards had different mappings for the same field (presumably different indices) + throw AggregationErrors.reduceTypeMismatch(referenceAgg.getName(), Optional.empty()); + } // Compute the overall result set size and the corpus size using the // top-level Aggregations from each shard globalSubsetSize += terms.getSubsetSize(); From 1a55e2fa76b938f9a354daa2d8c64f0c86db2077 Mon Sep 17 00:00:00 2001 From: Stef Nestor <26751266+stefnestor@users.noreply.github.com> Date: Thu, 9 May 2024 10:27:19 -0600 Subject: [PATCH 030/119] (Doc+) Capture Elasticsearch diagnostic (#108259) * (Doc+) Capture Elasticsearch diagnostic * add diagnostic topic to nav, chunk content, style edits * fix test --------- Co-authored-by: shainaraskas --- docs/reference/troubleshooting.asciidoc | 2 + .../troubleshooting/diagnostic.asciidoc | 152 ++++++++++++++++++ 2 files changed, 154 insertions(+) create mode 100644 docs/reference/troubleshooting/diagnostic.asciidoc diff --git a/docs/reference/troubleshooting.asciidoc b/docs/reference/troubleshooting.asciidoc index 01ef39b69c52..ceff8619062c 100644 --- a/docs/reference/troubleshooting.asciidoc +++ b/docs/reference/troubleshooting.asciidoc @@ -138,3 +138,5 @@ include::troubleshooting/troubleshooting-searches.asciidoc[] include::troubleshooting/troubleshooting-shards-capacity.asciidoc[] include::troubleshooting/troubleshooting-unbalanced-cluster.asciidoc[] + +include::troubleshooting/diagnostic.asciidoc[] diff --git 
a/docs/reference/troubleshooting/diagnostic.asciidoc b/docs/reference/troubleshooting/diagnostic.asciidoc new file mode 100644 index 000000000000..a944ca88d285 --- /dev/null +++ b/docs/reference/troubleshooting/diagnostic.asciidoc @@ -0,0 +1,152 @@ +[[diagnostic]] +== Capturing diagnostics +++++ +Capture diagnostics +++++ +:keywords: Elasticsearch diagnostic, diagnostics + +The {es} https://github.com/elastic/support-diagnostics[Support Diagnostic] tool captures a point-in-time snapshot of cluster statistics and most settings. +It works against all {es} versions. + +This information can be used to troubleshoot problems with your cluster. For examples of issues that you can troubleshoot using Support Diagnostic tool output, refer to https://www.elastic.co/blog/why-does-elastic-support-keep-asking-for-diagnostic-files[the Elastic blog]. + +You can generate diagnostic information using this tool before you contact https://support.elastic.co[Elastic Support] or +https://discuss.elastic.co[Elastic Discuss] to minimize turnaround time. + +[discrete] +[[diagnostic-tool-requirements]] +=== Requirements + +- Java Runtime Environment or Java Development Kit v1.8 or higher + +[discrete] +[[diagnostic-tool-access]] +=== Access the tool + +The Support Diagnostic tool is included as a sub-library in some Elastic deployments: + +* {ece}: Located under **{ece}** > **Deployment** > **Operations** > +**Prepare Bundle** > **{es}**. +* {eck}: Run as https://www.elastic.co/guide/en/cloud-on-k8s/current/k8s-take-eck-dump.html[`eck-diagnostics`]. + +You can also directly download the `diagnostics-X.X.X-dist.zip` file for the latest Support Diagnostic release +from https://github.com/elastic/support-diagnostics/releases/latest[the `support-diagnostic` repo]. + + +[discrete] +[[diagnostic-capture]] +=== Capture diagnostic information + +To capture an {es} diagnostic: + +. 
In a terminal, verify that your network and user permissions are sufficient to connect to your {es} +cluster by polling the cluster's <>. ++ +For example, with the parameters `host:localhost`, `port:9200`, and `username:elastic`, you'd use the following curl request: ++ +[source,sh] +---- +curl -X GET -k -u elastic -p https://localhost:9200/_cluster/health +---- +// NOTCONSOLE ++ +If you receive an HTTP 200 `OK` response, then you can proceed to the next step. If you receive a different +response code, then <> before proceeding. + +. Using the same environment parameters, run the diagnostic tool script. ++ +For information about the parameters that you can pass to the tool, refer to the https://github.com/elastic/support-diagnostics#standard-options[diagnostic +parameter reference]. ++ +The following command options are recommended: ++ +**Unix-based systems** ++ +[source,sh] +---- +sudo ./diagnostics.sh --type local --host localhost --port 9200 -u elastic -p --bypassDiagVerify --ssl --noVerify +---- ++ +**Windows** ++ +[source,sh] +---- +sudo .\diagnostics.bat --type local --host localhost --port 9200 -u elastic -p --bypassDiagVerify --ssl --noVerify +---- ++ +[TIP] +.Script execution modes +==== +You can execute the script in three https://github.com/elastic/support-diagnostics#diagnostic-types[modes]: + +* `local` (default, recommended): Polls the <>, +gathers operating system info, and captures cluster and GC logs. + +* `remote`: Establishes an ssh session +to the applicable target server to pull the same information as `local`. + +* `api`: Polls the <>. All other data must be +collected manually. +==== + +. When the script has completed, verify that no errors were logged to `diagnostic.log`. +If the log file contains errors, then refer to <>. + +. If the script completed without errors, then an archive with the format `-diagnostics-.zip` is created in the working directory, or an output directory you have specified. 
You can review or share the diagnostic archive as needed. + +[discrete] +[[diagnostic-non-200]] +=== Diagnose a non-200 cluster health response + +When you poll your cluster health, if you receive any response other than `200 OK`, then the diagnostic tool +might not work as intended. The following are possible error codes and their resolutions: + +HTTP 401 `UNAUTHENTICATED`:: +Additional information in the error will usually indicate either +that your `username:password` pair is invalid, or that your `.security` +index is unavailable and you need to set up a temporary +<> user with `role:superuser` to authenticate. + +HTTP 403 `UNAUTHORIZED`:: +Your `username` is recognized but +has insufficient permissions to run the diagnostic. Either use a different +username or elevate the user's privileges. + +HTTP 429 `TOO_MANY_REQUESTS` (for example, `circuit_breaking_exception`):: +Your username authenticated and authorized, but the cluster is under +sufficiently high strain that it's not responding to API calls. These +responses are usually intermittent. You can proceed with running the diagnostic, +but the diagnostic results might be incomplete. + +HTTP 504 `BAD_GATEWAY`:: +Your network is experiencing issues reaching the cluster. You might be using a proxy or firewall. +Consider running the diagnostic tool from a different location, confirming your port, or using an IP +instead of a URL domain. + +HTTP 503 `SERVICE_UNAVAILABLE` (for example, `master_not_discovered_exception`):: +Your cluster does not currently have an elected master node, which is +required for it to be API-responsive. This might be temporary while the master +node rotates. If the issue persists, then <> +before proceeding. 
+ +[discrete] +[[diagnostic-log-errors]] +=== Diagnose errors in `diagnostic.log` + +The following are common errors that you might encounter when running the diagnostic tool: + +* `Error: Could not find or load main class com.elastic.support.diagnostics.DiagnosticApp` ++ +This indicates that you accidentally downloaded the source code file +instead of `diagnostics-X.X.X-dist.zip` from the releases page. + +* `Could not retrieve the Elasticsearch version due to a system or network error - unable to continue.` ++ +This indicates that the diagnostic couldn't run commands against the cluster. +Poll the cluster's health again, and ensure that you're using the same parameters +when you run the diagnostic batch or shell file. + +* A `security_exception` that includes `is unauthorized for user`: ++ +The provided user has insufficient admin permissions to run the diagnostic tool. Use another +user, or grant the user `role:superuser` privileges. \ No newline at end of file From 91bed34e72c6fe91773f5af81e1d8be0e028f514 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 9 May 2024 18:37:31 +0100 Subject: [PATCH 031/119] Clarify docs on deleting searchable snapshots (#108451) The word `cannot` implies Elasticsearch prevents you from doing these things, but it doesn't have this protection today (see #73947). This commit clarifies this by saying `must not` instead. Closes #108450 --- docs/reference/searchable-snapshots/index.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/reference/searchable-snapshots/index.asciidoc b/docs/reference/searchable-snapshots/index.asciidoc index 4a56961246c2..794496c8b24a 100644 --- a/docs/reference/searchable-snapshots/index.asciidoc +++ b/docs/reference/searchable-snapshots/index.asciidoc @@ -310,9 +310,9 @@ of {search-snap} indices. The sole copy of the data in a {search-snap} index is the underlying snapshot, stored in the repository. 
For example: -* You cannot unregister a repository while any of the searchable snapshots it -contains are mounted in {es}. You also cannot delete a snapshot if any of its -indices are mounted as a searchable snapshot in the same cluster. +* You must not unregister a repository while any of the searchable snapshots it +contains are mounted in {es}. You also must not delete a snapshot if any of its +indices are mounted as searchable snapshots. * If you mount indices from snapshots held in a repository to which a different cluster has write access then you must make sure that the other cluster does not From 6ecb295ff1c7bc473e043ce896d06525973dd5b7 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 9 May 2024 19:04:45 +0100 Subject: [PATCH 032/119] Document `transport.compress` trade-offs more clearly (#108458) Spells out explicitly that setting `transport.compress: true` may cost extra CPU. --- .../cluster/remote-clusters-settings.asciidoc | 53 ++++++++++--------- docs/reference/modules/transport.asciidoc | 42 +++++++++++---- 2 files changed, 62 insertions(+), 33 deletions(-) diff --git a/docs/reference/modules/cluster/remote-clusters-settings.asciidoc b/docs/reference/modules/cluster/remote-clusters-settings.asciidoc index 848a29c64279..2308ec259da4 100644 --- a/docs/reference/modules/cluster/remote-clusters-settings.asciidoc +++ b/docs/reference/modules/cluster/remote-clusters-settings.asciidoc @@ -59,35 +59,40 @@ you configure the remotes. `cluster.remote..transport.compress`:: - Per cluster setting that enables you to configure compression for requests - to a specific remote cluster. This setting impacts only requests - sent to the remote cluster. If the inbound request is compressed, - Elasticsearch compresses the response. The setting options are `true`, - `indexing_data`, and `false`. If unset, the global `transport.compress` is - used as the fallback setting. 
+ Per-cluster setting that enables you to configure compression for requests to + a specific remote cluster. The handling cluster will automatically compress + responses to compressed requests. The setting options are `true`, + `indexing_data`, and `false`. If unset, defaults to the behaviour specified + by the node-wide `transport.compress` setting. See the + <> for further information. `cluster.remote..transport.compression_scheme`:: - Per cluster setting that enables you to configure compression scheme for - requests to a specific remote cluster. This setting impacts only requests - sent to the remote cluster. If an inbound request is compressed, {es} - compresses the response using the same compression scheme. The setting options - are `deflate` and `lz4`. If unset, the global `transport.compression_scheme` - is used as the fallback setting. + Per-cluster setting that enables you to configure the compression scheme for + requests to a specific cluster if those requests are selected to be + compressed by the `cluster.remote..transport.compress` + setting. The handling cluster will automatically use the same compression + scheme for responses as for the corresponding requests. The setting options + are `deflate` and `lz4`. If unset, defaults to the behaviour specified by the + node-wide `transport.compression_scheme` setting. See the + <> for further information. - -`cluster.remote..credentials` (<>, <>):: [[remote-cluster-credentials-setting]] - - Per cluster setting for configuring <>. - This setting takes the encoded value of a - <> and must be set - in the <> on each node in the cluster. - The presence (or not) of this setting determines which model a remote cluster uses. - If present, the remote cluster uses the API key based model. - Otherwise, it uses the certificate based model. - If the setting is added, removed, or updated in the <> and reloaded via the - <> API, the cluster will automatically rebuild its connection to the remote. 
+`cluster.remote..credentials`:: + + (<>, <>) + Per-cluster setting for configuring <>. This setting takes the encoded value of a + <> and must + be set in the <> on each node in the cluster. + The presence (or not) of this setting determines which model a remote cluster + uses. If present, the remote cluster uses the API key based model. Otherwise, + it uses the certificate based model. If the setting is added, removed, or + updated in the <> and reloaded via the + <> API, the cluster will automatically + rebuild its connection to the remote. [[remote-cluster-sniff-settings]] ==== Sniff mode remote cluster settings diff --git a/docs/reference/modules/transport.asciidoc b/docs/reference/modules/transport.asciidoc index 2ec574544f9b..d08da2cfc1d2 100644 --- a/docs/reference/modules/transport.asciidoc +++ b/docs/reference/modules/transport.asciidoc @@ -47,20 +47,44 @@ different from `transport.port`. Defaults to the port assigned via The connect timeout for initiating a new connection (in time setting format). Defaults to `30s`. +[[transport-settings-compress]] `transport.compress`:: (<>, string) -Set to `true`, `indexing_data`, or `false` to configure transport compression -between nodes. The option `true` will compress all data. The option -`indexing_data` will compress only the raw index data sent between nodes during -ingest, ccr following (excluding bootstrap), and operations based shard recovery -(excluding transferring lucene files). Defaults to `indexing_data`. +Determines which transport requests are compressed before sending them to +another node. {es} will compress transport responses if and only if the +corresponding request was compressed. See also `transport.compression_scheme`, +which specifies the compression scheme which is used. Accepts the following +values: ++ +-- +`false`:: + +No transport requests are compressed. This option uses the most network +bandwidth, but avoids the CPU overhead of compression and decompression. 
+ +`indexing_data`:: + +Compresses only the raw indexing data sent between nodes during ingest, CCR +following (excluding bootstrapping) and operations-based shard recovery +(excluding file-based recovery which copies the raw Lucene data). This option +is a good trade-off between network bandwidth savings and the extra CPU +required for compression and decompression. This option is the default. + +`true`:: + +All transport requests are compressed. This option may perform better than +`indexing_data` in terms of network bandwidth, but will require the most CPU +for compression and decompression work. +-- +[[transport-settings-compression-scheme]] `transport.compression_scheme`:: (<>, string) -Configures the compression scheme for `transport.compress`. The options are -`deflate` or `lz4`. If `lz4` is configured and the remote node has not been -upgraded to a version supporting `lz4`, the traffic will be sent uncompressed. -Defaults to `lz4`. +Configures the compression scheme for requests which are selected for +compression by the `transport.compress` setting. Accepts either `deflate` or +`lz4`, which offer different trade-offs between compression ratio and CPU +usage. {es} will use the same compression scheme for responses as for the +corresponding requests. Defaults to `lz4`. `transport.tcp.keep_alive`:: (<>, boolean) From ff201646f0c977c72421275f5d57478b04cbcb05 Mon Sep 17 00:00:00 2001 From: Parker Timmins Date: Thu, 9 May 2024 12:17:23 -0600 Subject: [PATCH 033/119] Allow read_slm to call GET /_slm/status (#108333) Add the ability to access the SLM status api to the read_slm privilege. 
--- docs/changelog/108333.yaml | 5 +++++ .../security/authz/privilege/ClusterPrivilegeResolver.java | 7 ++++++- .../core/security/authz/privilege/PrivilegeTests.java | 7 ++++++- 3 files changed, 17 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/108333.yaml diff --git a/docs/changelog/108333.yaml b/docs/changelog/108333.yaml new file mode 100644 index 000000000000..c3152500ce1b --- /dev/null +++ b/docs/changelog/108333.yaml @@ -0,0 +1,5 @@ +pr: 108333 +summary: Allow `read_slm` to call GET /_slm/status +area: ILM+SLM +type: bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index 372b62cffeae..7f927d45a237 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -53,6 +53,7 @@ import org.elasticsearch.xpack.core.security.action.user.ProfileHasPrivilegesAction; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.support.Automatons; +import org.elasticsearch.xpack.core.slm.action.GetSLMStatusAction; import org.elasticsearch.xpack.core.slm.action.GetSnapshotLifecycleAction; import java.util.Collection; @@ -165,7 +166,11 @@ public class ClusterPrivilegeResolver { ILMActions.STOP.name(), GetStatusAction.NAME ); - private static final Set READ_SLM_PATTERN = Set.of(GetSnapshotLifecycleAction.NAME, GetStatusAction.NAME); + private static final Set READ_SLM_PATTERN = Set.of( + GetSLMStatusAction.NAME, + GetSnapshotLifecycleAction.NAME, + GetStatusAction.NAME + ); private static final Set MANAGE_SEARCH_APPLICATION_PATTERN = Set.of("cluster:admin/xpack/application/search_application/*"); private 
static final Set MANAGE_SEARCH_QUERY_RULES_PATTERN = Set.of("cluster:admin/xpack/query_rules/*"); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index d15fb9a1409d..ad73944f4c64 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -460,7 +460,12 @@ public void testSlmPrivileges() { } { - verifyClusterActionAllowed(ClusterPrivilegeResolver.READ_SLM, "cluster:admin/slm/get", "cluster:admin/ilm/operation_mode/get"); + verifyClusterActionAllowed( + ClusterPrivilegeResolver.READ_SLM, + "cluster:admin/slm/get", + "cluster:admin/slm/status", + "cluster:admin/ilm/operation_mode/get" + ); verifyClusterActionDenied( ClusterPrivilegeResolver.READ_SLM, "cluster:admin/slm/delete", From 0b71746d96acde1397a11a99ea466f1078124eb3 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Thu, 9 May 2024 14:19:40 -0400 Subject: [PATCH 034/119] [Transform] Retry Destination IndexNotFoundException (#108394) A Destination Index can be removed from its previous shard in the middle of a Transform run. Ideally, this happens as part of the Delete API, and the Transform has already been stopped, but in the case that it isn't, we want to retry the checkpoint. If the Transform had been stopped, the retry will move the Indexer into a graceful shutdown. If the Transform had not been stopped, the retry will check if the Index exists or recreate the Index if it does not exist. This is currently how unattended Transforms work, and this change will make it so regular Transforms can also auto-recover from this error. 
Fix #107263 --- docs/changelog/108394.yaml | 6 + .../transforms/ClientTransformIndexer.java | 6 +- .../transforms/TransformContext.java | 9 + .../transforms/TransformIndexer.java | 33 ++- .../utils/ExceptionRootCauseFinder.java | 12 +- .../TransformIndexerFailureHandlingTests.java | 204 +++++++++++++++++- .../utils/ExceptionRootCauseFinderTests.java | 129 +++-------- 7 files changed, 278 insertions(+), 121 deletions(-) create mode 100644 docs/changelog/108394.yaml diff --git a/docs/changelog/108394.yaml b/docs/changelog/108394.yaml new file mode 100644 index 000000000000..58f48fa548c6 --- /dev/null +++ b/docs/changelog/108394.yaml @@ -0,0 +1,6 @@ +pr: 108394 +summary: Handle `IndexNotFoundException` +area: Transform +type: bug +issues: + - 107263 diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java index ed0f721f5f7f..df8c3f62034e 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java @@ -193,7 +193,11 @@ protected void handleBulkResponse(BulkResponse bulkResponse, ActionListener listener) { }, listener::onFailure); var deducedDestIndexMappings = new SetOnce>(); - var shouldMaybeCreateDestIndexForUnattended = context.getCheckpoint() == 0 - && TransformEffectiveSettings.isUnattended(transformConfig.getSettings()); + + // if the unattended transform had not created the destination index yet, or if the destination index was deleted for any + // type of transform during the last run, then we try to create the destination index. + // This is important to create the destination index explicitly before indexing documents. Otherwise, the destination + // index aliases may be missing. 
+ var shouldMaybeCreateDestIndex = isFirstUnattendedRun() || context.shouldRecreateDestinationIndex(); ActionListener> fieldMappingsListener = ActionListener.wrap(destIndexMappings -> { if (destIndexMappings.isEmpty() == false) { @@ -359,11 +363,12 @@ protected void onStart(long now, ActionListener listener) { // ... otherwise we fall back to index mappings deduced based on source indices this.fieldMappings = deducedDestIndexMappings.get(); } - // Since the unattended transform could not have created the destination index yet, we do it here. - // This is important to create the destination index explicitly before indexing first documents. Otherwise, the destination - // index aliases may be missing. - if (destIndexMappings.isEmpty() && shouldMaybeCreateDestIndexForUnattended) { - doMaybeCreateDestIndex(deducedDestIndexMappings.get(), configurationReadyListener); + + if (destIndexMappings.isEmpty() && shouldMaybeCreateDestIndex) { + doMaybeCreateDestIndex(deducedDestIndexMappings.get(), configurationReadyListener.delegateFailure((delegate, response) -> { + context.setShouldRecreateDestinationIndex(false); + delegate.onResponse(response); + })); } else { configurationReadyListener.onResponse(null); } @@ -380,7 +385,7 @@ protected void onStart(long now, ActionListener listener) { deducedDestIndexMappings.set(validationResponse.getDestIndexMappings()); if (isContinuous()) { transformsConfigManager.getTransformConfiguration(getJobId(), ActionListener.wrap(config -> { - if (transformConfig.equals(config) && fieldMappings != null && shouldMaybeCreateDestIndexForUnattended == false) { + if (transformConfig.equals(config) && fieldMappings != null && shouldMaybeCreateDestIndex == false) { logger.trace("[{}] transform config has not changed.", getJobId()); configurationReadyListener.onResponse(null); } else { @@ -415,7 +420,7 @@ protected void onStart(long now, ActionListener listener) { }, listener::onFailure); Instant instantOfTrigger = Instant.ofEpochMilli(now); - // If we 
are not on the initial batch checkpoint and its the first pass of whatever continuous checkpoint we are on, + // If we are not on the initial batch checkpoint and it's the first pass of whatever continuous checkpoint we are on, // we should verify if there are local changes based on the sync config. If not, do not proceed further and exit. if (context.getCheckpoint() > 0 && initialRun()) { checkpointProvider.sourceHasChanged(getLastCheckpoint(), ActionListener.wrap(hasChanged -> { @@ -436,8 +441,7 @@ protected void onStart(long now, ActionListener listener) { hasSourceChanged = true; listener.onFailure(failure); })); - } else if (context.getCheckpoint() == 0 && TransformEffectiveSettings.isUnattended(transformConfig.getSettings())) { - // this transform runs in unattended mode and has never run, to go on + } else if (shouldMaybeCreateDestIndex) { validate(changedSourceListener); } else { hasSourceChanged = true; @@ -447,6 +451,13 @@ protected void onStart(long now, ActionListener listener) { } } + /** + * Returns true if this transform runs in unattended mode and has never run. 
+ */ + private boolean isFirstUnattendedRun() { + return context.getCheckpoint() == 0 && TransformEffectiveSettings.isUnattended(transformConfig.getSettings()); + } + protected void initializeFunction() { // create the function function = FunctionFactory.create(getConfig()); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java index 8618b01a0440..8bf859a020ba 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinder.java @@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchContextMissingException; import org.elasticsearch.tasks.TaskCancelledException; @@ -63,7 +64,7 @@ public static Throwable getFirstIrrecoverableExceptionFromBulkResponses(Collecti } if (unwrappedThrowable instanceof ElasticsearchException elasticsearchException) { - if (isExceptionIrrecoverable(elasticsearchException)) { + if (isExceptionIrrecoverable(elasticsearchException) && isNotIndexNotFoundException(elasticsearchException)) { return elasticsearchException; } } @@ -72,6 +73,15 @@ public static Throwable getFirstIrrecoverableExceptionFromBulkResponses(Collecti return null; } + /** + * We can safely recover from IndexNotFoundExceptions on Bulk responses. + * If the transform is running, the next checkpoint will recreate the index. + * If the transform is not running, the next start request will recreate the index. 
+ */ + private static boolean isNotIndexNotFoundException(ElasticsearchException elasticsearchException) { + return elasticsearchException instanceof IndexNotFoundException == false; + } + public static boolean isExceptionIrrecoverable(ElasticsearchException elasticsearchException) { if (IRRECOVERABLE_REST_STATUSES.contains(elasticsearchException.status())) { diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java index fe54847af040..f39a4329f2bb 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java @@ -10,10 +10,13 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -27,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.reindex.BulkByScrollResponse; import 
org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.script.ScriptException; @@ -75,6 +79,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.core.transform.transforms.DestConfigTests.randomDestConfig; @@ -85,6 +90,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.matchesRegex; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; @@ -101,6 +107,10 @@ public class TransformIndexerFailureHandlingTests extends ESTestCase { private Client client; private ThreadPool threadPool; + private static final Function EMPTY_BULK_RESPONSE = bulkRequest -> new BulkResponse( + new BulkItemResponse[0], + 100 + ); static class MockedTransformIndexer extends ClientTransformIndexer { @@ -110,6 +120,7 @@ static class MockedTransformIndexer extends ClientTransformIndexer { // used for synchronizing with the test private CountDownLatch latch; + private int doProcessCount; MockedTransformIndexer( ThreadPool threadPool, @@ -127,7 +138,8 @@ static class MockedTransformIndexer extends ClientTransformIndexer { TransformContext context, Function searchFunction, Function bulkFunction, - Function deleteByQueryFunction + Function deleteByQueryFunction, + int doProcessCount ) { super( threadPool, @@ -157,6 +169,7 @@ static class MockedTransformIndexer extends ClientTransformIndexer { this.searchFunction = searchFunction; this.bulkFunction = bulkFunction; this.deleteByQueryFunction = deleteByQueryFunction; + this.doProcessCount = doProcessCount; } public void initialize() { @@ -278,6 +291,17 @@ void doGetFieldMappings(ActionListener> fieldMappingsListene protected 
void persistState(TransformState state, ActionListener listener) { listener.onResponse(null); } + + @Override + protected IterationResult doProcess(SearchResponse searchResponse) { + if (doProcessCount > 0) { + doProcessCount -= 1; + // pretend that we processed 10k documents for each call + getStats().incrementNumDocuments(10_000); + return new IterationResult<>(Stream.of(new IndexRequest()), new TransformIndexerPosition(null, null), false); + } + return super.doProcess(searchResponse); + } } @Before @@ -936,6 +960,152 @@ public void testHandleFailureAuditing() { auditor.assertAllExpectationsMatched(); } + /** + * Given no bulk upload errors + * When we run the indexer + * Then we should not fail or recreate the destination index + */ + public void testHandleBulkResponseWithNoFailures() throws Exception { + var indexer = runIndexer(createMockIndexer(returnHit(), EMPTY_BULK_RESPONSE)); + assertThat(indexer.getStats().getIndexFailures(), is(0L)); + assertFalse(indexer.context.shouldRecreateDestinationIndex()); + assertNull(indexer.context.getLastFailure()); + } + + private static TransformIndexer runIndexer(MockedTransformIndexer indexer) throws Exception { + var latch = indexer.newLatch(1); + indexer.start(); + assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); + assertTrue(indexer.maybeTriggerAsyncJob(System.currentTimeMillis())); + assertThat(indexer.getState(), equalTo(IndexerState.INDEXING)); + latch.countDown(); + assertBusy(() -> assertThat(indexer.getState(), equalTo(IndexerState.STARTED)), 10, TimeUnit.SECONDS); + return indexer; + } + + private MockedTransformIndexer createMockIndexer( + Function searchFunction, + Function bulkFunction + ) { + return createMockIndexer(searchFunction, bulkFunction, mock(TransformContext.Listener.class)); + } + + private static Function returnHit() { + return request -> new SearchResponse( + new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + // Simulate 
completely null aggs + null, + new Suggest(Collections.emptyList()), + false, + false, + new SearchProfileResults(Collections.emptyMap()), + 1, + "", + 1, + 1, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ); + } + + /** + * Given an irrecoverable bulk upload error + * When we run the indexer + * Then we should fail without retries and not recreate the destination index + */ + public void testHandleBulkResponseWithIrrecoverableFailures() throws Exception { + var failCalled = new AtomicBoolean(); + var indexer = runIndexer( + createMockIndexer( + returnHit(), + bulkResponseWithError(new ResourceNotFoundException("resource not found error")), + createContextListener(failCalled, new AtomicReference<>()) + ) + ); + assertThat(indexer.getStats().getIndexFailures(), is(1L)); + assertFalse(indexer.context.shouldRecreateDestinationIndex()); + assertTrue(failCalled.get()); + } + + private MockedTransformIndexer createMockIndexer( + Function searchFunction, + Function bulkFunction, + TransformContext.Listener listener + ) { + return createMockIndexer( + new TransformConfig( + randomAlphaOfLength(10), + randomSourceConfig(), + randomDestConfig(), + null, + null, + null, + randomPivotConfig(), + null, + randomBoolean() ? null : randomAlphaOfLengthBetween(1, 1000), + new SettingsConfig.Builder().setMaxPageSearchSize(randomBoolean() ? 
null : randomIntBetween(500, 10_000)).build(), + null, + null, + null, + null + ), + new AtomicReference<>(IndexerState.STOPPED), + searchFunction, + bulkFunction, + null, + threadPool, + ThreadPool.Names.GENERIC, + mock(TransformAuditor.class), + new TransformContext(TransformTaskState.STARTED, "", 0, listener), + 1 + ); + } + + private static Function bulkResponseWithError(Exception e) { + return bulkRequest -> new BulkResponse( + new BulkItemResponse[] { + BulkItemResponse.failure(1, DocWriteRequest.OpType.INDEX, new BulkItemResponse.Failure("the_index", "id", e)) }, + 100 + ); + } + + /** + * Given an IndexNotFound bulk upload error + * When we run the indexer + * Then we should fail with retries and recreate the destination index + */ + public void testHandleBulkResponseWithIndexNotFound() throws Exception { + var indexer = runIndexerWithBulkResponseError(new IndexNotFoundException("Some Error")); + assertThat(indexer.getStats().getIndexFailures(), is(1L)); + assertTrue(indexer.context.shouldRecreateDestinationIndex()); + assertFalse(bulkIndexingException(indexer).isIrrecoverable()); + } + + private TransformIndexer runIndexerWithBulkResponseError(Exception e) throws Exception { + return runIndexer(createMockIndexer(returnHit(), bulkResponseWithError(e))); + } + + private static BulkIndexingException bulkIndexingException(TransformIndexer indexer) { + var lastFailure = indexer.context.getLastFailure(); + assertNotNull(lastFailure); + assertThat(lastFailure, instanceOf(BulkIndexingException.class)); + return (BulkIndexingException) lastFailure; + } + + /** + * Given a recoverable bulk upload error + * When we run the indexer + * Then we should fail with retries and not recreate the destination index + */ + public void testHandleBulkResponseWithNoIrrecoverableFailures() throws Exception { + var indexer = runIndexerWithBulkResponseError(new EsRejectedExecutionException("es rejected execution")); + assertThat(indexer.getStats().getIndexFailures(), is(1L)); + 
assertFalse(indexer.context.shouldRecreateDestinationIndex()); + assertFalse(bulkIndexingException(indexer).isIrrecoverable()); + } + public void testHandleFailure() { testHandleFailure(0, 5, 0, 0); testHandleFailure(5, 0, 5, 2); @@ -1042,11 +1212,36 @@ private MockedTransformIndexer createMockIndexer( String executorName, TransformAuditor auditor, TransformContext context + ) { + return createMockIndexer( + config, + state, + searchFunction, + bulkFunction, + deleteByQueryFunction, + threadPool, + executorName, + auditor, + context, + 0 + ); + } + + private MockedTransformIndexer createMockIndexer( + TransformConfig config, + AtomicReference state, + Function searchFunction, + Function bulkFunction, + Function deleteByQueryFunction, + ThreadPool threadPool, + String executorName, + TransformAuditor auditor, + TransformContext context, + int doProcessCount ) { IndexBasedTransformConfigManager transformConfigManager = mock(IndexBasedTransformConfigManager.class); doAnswer(invocationOnMock -> { - @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + ActionListener listener = invocationOnMock.getArgument(1); listener.onResponse(config); return null; }).when(transformConfigManager).getTransformConfiguration(any(), any()); @@ -1066,7 +1261,8 @@ private MockedTransformIndexer createMockIndexer( context, searchFunction, bulkFunction, - deleteByQueryFunction + deleteByQueryFunction, + doProcessCount ); indexer.initialize(); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java index b71156cad5ad..9a0431d40a97 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java +++ 
b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/utils/ExceptionRootCauseFinderTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.shard.ShardId; @@ -27,116 +28,27 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentLocation; +import java.util.Arrays; import java.util.Collection; -import java.util.HashMap; import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; +import java.util.stream.Collectors; public class ExceptionRootCauseFinderTests extends ESTestCase { public void testGetFirstIrrecoverableExceptionFromBulkResponses() { - Map bulkItemResponses = new HashMap<>(); - - int id = 1; - // 1 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new DocumentParsingException(XContentLocation.UNKNOWN, "document parsing error") - ) - ) - ); - // 2 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new ResourceNotFoundException("resource not found error")) - ) - ); - // 3 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new IllegalArgumentException("illegal argument error")) - ) - ); - // 4 not irrecoverable - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new EsRejectedExecutionException("es rejected execution")) - ) - ); - // 5 not irrecoverable - 
bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure("the_index", "id", new TranslogException(new ShardId("the_index", "uid", 0), "translog error")) - ) - ); - // 6 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("Authentication required", RestStatus.UNAUTHORIZED) - ) - ) - ); - // 7 - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("current license is non-compliant for [transform]", RestStatus.FORBIDDEN) - ) - ) - ); - // 8 not irrecoverable - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("overloaded, to many requests", RestStatus.TOO_MANY_REQUESTS) - ) - ) - ); - // 9 not irrecoverable - bulkItemResponses.put( - id, - BulkItemResponse.failure( - id++, - OpType.INDEX, - new BulkItemResponse.Failure( - "the_index", - "id", - new ElasticsearchSecurityException("internal error", RestStatus.INTERNAL_SERVER_ERROR) - ) - ) + Map bulkItemResponses = bulkItemResponses( + new DocumentParsingException(XContentLocation.UNKNOWN, "document parsing error"), + new ResourceNotFoundException("resource not found error"), + new IllegalArgumentException("illegal argument error"), + new EsRejectedExecutionException("es rejected execution"), + new TranslogException(new ShardId("the_index", "uid", 0), "translog error"), + new ElasticsearchSecurityException("Authentication required", RestStatus.UNAUTHORIZED), + new ElasticsearchSecurityException("current license is non-compliant for [transform]", RestStatus.FORBIDDEN), + new ElasticsearchSecurityException("overloaded, to many requests", RestStatus.TOO_MANY_REQUESTS), + new 
ElasticsearchSecurityException("internal error", RestStatus.INTERNAL_SERVER_ERROR), + new IndexNotFoundException("some missing index") ); assertFirstException(bulkItemResponses.values(), DocumentParsingException.class, "document parsing error"); @@ -157,6 +69,14 @@ public void testGetFirstIrrecoverableExceptionFromBulkResponses() { assertNull(ExceptionRootCauseFinder.getFirstIrrecoverableExceptionFromBulkResponses(bulkItemResponses.values())); } + private static Map bulkItemResponses(Exception... exceptions) { + var id = new AtomicInteger(1); + return Arrays.stream(exceptions) + .map(exception -> new BulkItemResponse.Failure("the_index", "id", exception)) + .map(failure -> BulkItemResponse.failure(id.get(), OpType.INDEX, failure)) + .collect(Collectors.toMap(response -> id.getAndIncrement(), Function.identity())); + } + public void testIsIrrecoverable() { assertFalse(ExceptionRootCauseFinder.isExceptionIrrecoverable(new MapperException("mappings problem"))); assertFalse(ExceptionRootCauseFinder.isExceptionIrrecoverable(new TaskCancelledException("cancelled task"))); @@ -174,6 +94,7 @@ public void testIsIrrecoverable() { assertTrue( ExceptionRootCauseFinder.isExceptionIrrecoverable(new DocumentParsingException(new XContentLocation(1, 2), "parse error")) ); + assertTrue(ExceptionRootCauseFinder.isExceptionIrrecoverable(new IndexNotFoundException("some missing index"))); } private static void assertFirstException(Collection bulkItemResponses, Class expectedClass, String message) { From 1bc64745f2c397886efd3555e365f88f5089be51 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 9 May 2024 13:30:21 -0700 Subject: [PATCH 035/119] Add number of nodes accessor to test cluster handle (#108484) Local test clusters have several methods allowing interaction with nodes by ordinal number. However, there is currently no way to know how many nodes were actually configured for the cluster. This commit adds an accessor for the number of nodes the cluster handle knows about. 
--- .../test/cluster/local/DefaultLocalClusterHandle.java | 5 +++++ .../cluster/local/DefaultLocalElasticsearchCluster.java | 5 +++++ .../test/cluster/local/LocalClusterHandle.java | 6 ++++++ 3 files changed, 16 insertions(+) diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java index 718c9c1bb004..5292d917df63 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java @@ -65,6 +65,11 @@ public DefaultLocalClusterHandle(String name, List nodes) { this.nodes = nodes; } + @Override + public int getNumNodes() { + return nodes.size(); + } + @Override public void start() { if (started.getAndSet(true) == false) { diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java index 77b73e7b6ce8..7b24709b18a9 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java @@ -54,6 +54,11 @@ public void evaluate() throws Throwable { }; } + @Override + public int getNumNodes() { + return handle.getNumNodes(); + } + @Override public void start() { checkHandle(); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java index 7a95d682e9dd..acb9ef77b9e4 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java +++ 
b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java @@ -16,6 +16,12 @@ import java.io.InputStream; public interface LocalClusterHandle extends ClusterHandle { + + /** + * Returns the number of nodes that are part of this cluster. + */ + int getNumNodes(); + /** * Stops the node at a given index. * @param index of the node to stop From b26dc840cf7036d62275ede0a9cd4665016ddadd Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Thu, 9 May 2024 16:58:26 -0400 Subject: [PATCH 036/119] Explain Settings in the arch. guide (#107379) --- docs/internal/GeneralArchitectureGuide.md | 60 +++++++++++++++++++ .../common/settings/Setting.java | 2 +- 2 files changed, 61 insertions(+), 1 deletion(-) diff --git a/docs/internal/GeneralArchitectureGuide.md b/docs/internal/GeneralArchitectureGuide.md index f865277d07f8..a2dadb70bf97 100644 --- a/docs/internal/GeneralArchitectureGuide.md +++ b/docs/internal/GeneralArchitectureGuide.md @@ -6,6 +6,66 @@ ## Settings +Elasticsearch supports [cluster-level settings][] and [index-level settings][], configurable via [node-level file settings][] +(e.g. `elasticsearch.yml` file), command line arguments and REST APIs. + +### Declaring a Setting + +[cluster-level settings]: https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html +[index-level settings]: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-update-settings.html +[node-level file settings]: https://www.elastic.co/guide/en/elasticsearch/reference/current/settings.html + +The [Setting][] class is the building block for Elasticsearch server settings. Each `Setting` can take multiple [Property][] +declarations to define setting characteristics. All setting values first come from the node-local `elasticsearch.yml` file, +if they are set therein, before falling back to the default specified in their `Setting` declaration. 
[A setting][] with +`Property.Dynamic` can be updated during runtime, but must be paired with a [local volatile variable like this one][] and +registered in the `ClusterSettings` via a utility like [ClusterSettings#initializeAndWatch()][] to catch and immediately +apply dynamic changes. NB that a common dynamic Setting bug is always reading the value directly from [Metadata#settings()][], +which holds the default and dynamically updated values, but _not_ the node-local `elasticsearch.yml` value. The scope of a +Setting must also be declared, such as `Property.IndexScope` for a setting that applies to indexes, or `Property.NodeScope` +for a cluster-level setting. + +[Setting]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/Setting.java#L57-L80 +[Property]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/Setting.java#L82 +[A setting]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java#L111-L117 +[local volatile variable like this one]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java#L123 +[ClusterSettings#initializeAndWatch()]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java#L145 +[Metadata#settings()]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L713-L715 + +[ClusterSettings][] tracks the [core Elasticsearch settings][]. Ultimately the `ClusterSettings` get loaded via the +[SettingsModule][]. Additional settings from the various plugins are [collected during node construction] and passed into the +[SettingsModule constructor][]. 
The Plugin interface has a [getSettings()][] method via which each plugin can declare additional +settings. + +[ClusterSettings]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java#L138 +[core Elasticsearch settings]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java#L204-L586 +[SettingsModule]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java#L54 +[collected during node construction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/node/NodeConstruction.java#L483 +[SettingsModule constructor]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/node/NodeConstruction.java#L491-L495 +[getSettings()]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/plugins/Plugin.java#L203-L208 + +### Dynamically updating a Setting + +Externally, [TransportClusterUpdateSettingsAction][] and [TransportUpdateSettingsAction][] (and the corresponding REST endpoints) +allow users to dynamically change cluster and index settings, respectively. Internally, `AbstractScopedSettings` (parent class +of `ClusterSettings`) has various helper methods to track dynamic changes: it keeps a [registry of `SettingUpdater`][] consumer +lambdas to run updates when settings are changed in the cluster state. The `ClusterApplierService` [sends setting updates][] +through to the `AbstractScopedSettings`, invoking the consumers registered therein for each updated setting. 
+ +[TransportClusterUpdateSettingsAction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java#L154-L160 +[TransportUpdateSettingsAction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java#L96-L101 +[registry of `SettingUpdater`]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java#L379-L381 +[sends setting updates]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java#L490-L494 + +Index settings are always persisted. They can only be modified on an existing index, and setting values are persisted as part +of the `IndexMetadata`. Cluster settings, however, can be either persisted or transient depending on how they are tied to +[Metadata][] ([applied here][]). Changes to persisted cluster settings will survive a full cluster restart; whereas changes +made to transient cluster settings will reset to their default values, or the `elasticsearch.yml` values, if the cluster +state must ever be reloaded from persisted state. 
+ +[Metadata]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L212-L213 +[applied here]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L2437 + ## Deprecations ## Plugins diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 4fb02fdaac7b..a385950e1092 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -113,7 +113,7 @@ public enum Property { DeprecatedWarning, /** - * Node scope + * Cluster-level or configuration file-level setting. Not an index setting. */ NodeScope, From 0081c1cd2b0ef5de5d97c25a47d598deb4a3a2fb Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 9 May 2024 16:33:08 -0700 Subject: [PATCH 037/119] Bump esql test suite timeout (#108488) --- .../elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index 448d39913a8f..fc65cb990f82 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -7,8 +7,10 @@ package org.elasticsearch.xpack.esql.qa.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.http.HttpEntity; +import org.apache.lucene.tests.util.TimeUnits; import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.client.Request; 
@@ -56,6 +58,8 @@ import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser; import static org.elasticsearch.xpack.ql.TestUtils.classpathResources; +// This test can run very long in serverless configurations +@TimeoutSuite(millis = 30 * TimeUnits.MINUTE) public abstract class EsqlSpecTestCase extends ESRestTestCase { // To avoid referencing the main module, we replicate EsqlFeatures.ASYNC_QUERY.id() here From e10b3d4b2b51ea095ec841625d7ec74e6ad5d7da Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 9 May 2024 16:33:26 -0700 Subject: [PATCH 038/119] Increase startup timeout in packaging tests (#108487) --- .../test/java/org/elasticsearch/packaging/util/Archives.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java index ecc043906bd1..787069eb2605 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java @@ -264,7 +264,7 @@ public static Shell.Result startElasticsearchWithTty( Locale.ROOT, """ expect - < Date: Fri, 10 May 2024 08:47:40 +0100 Subject: [PATCH 039/119] Handle must_not clauses when disabling the weight matches highlighting mode (#108453) This change makes sure we check all queries, even the must_not ones, to decide if we should disable weight matches highlighting or not. 
Closes #101667 Closes #106693 --- .../test/search.highlight/10_unified.yml | 115 ++++++++++++------ .../uhighlight/CustomUnifiedHighlighter.java | 3 +- 2 files changed, 82 insertions(+), 36 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml index 3ae8f8b09aa4..ca1d22e4a1ce 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml @@ -14,12 +14,26 @@ setup: "postings": "type": "text" "index_options": "offsets" + "nested": + "type": "nested" + "properties": + "text": + "type": "text" + "vectors": + "type": "dense_vector" + "dims": 2 + "index": true + "similarity": "l2_norm" + - do: index: index: test id: "1" body: "text" : "The quick brown fox is brown." + "nested": + "text": "The quick brown fox is brown." 
+ "vectors": [1, 2] - do: indices.refresh: {} @@ -43,6 +57,7 @@ teardown: "query" : { "multi_match" : { "query" : "quick brown fox", "fields" : [ "text*"] } }, "highlight" : { "type" : "unified", "fields" : { "*" : {} } } } + - length: { hits.hits.0.highlight: 3 } - match: {hits.hits.0.highlight.text.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} @@ -58,6 +73,7 @@ teardown: "query" : { "combined_fields" : { "query" : "quick brown fox", "fields" : [ "text*"] } }, "highlight" : { "type" : "unified", "fields" : { "*" : {} } } } + - length: { hits.hits.0.highlight: 3 } - match: {hits.hits.0.highlight.text.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} @@ -72,11 +88,13 @@ teardown: search: body: { "query": { "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } }, - "highlight": { "type": "unified", "fields": { "*": { } } } } + "highlight": { "type": "unified", "fields": { "*": { } } } + } - - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } - - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } - - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." } + - length: { hits.hits.0.highlight: 3 } + - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." 
} - do: indices.put_settings: @@ -90,6 +108,7 @@ teardown: "query" : { "multi_match" : { "query" : "quick brown fox", "type": "phrase", "fields" : [ "text*"] } }, "highlight" : { "type" : "unified", "fields" : { "*" : {} } } } + - length: { hits.hits.0.highlight: 3 } - match: {hits.hits.0.highlight.text.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown."} - match: {hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown."} @@ -100,43 +119,69 @@ teardown: reason: 'kNN was not correctly skipped until 8.12' - do: - indices.create: - index: test-highlighting-knn - body: - mappings: - "properties": - "vectors": - "type": "dense_vector" - "dims": 2 - "index": true - "similarity": "l2_norm" - "text": - "type": "text" - "fields": - "fvh": - "type": "text" - "term_vector": "with_positions_offsets" - "postings": - "type": "text" - "index_options": "offsets" - - do: - index: - index: test-highlighting-knn - id: "1" - body: - "text" : "The quick brown fox is brown." - "vectors": [1, 2] + search: + index: test + body: { + "query": { "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } }, + "highlight": { "type": "unified", "fields": { "text*": { } } }, + "knn": { "field": "vectors", "query_vector": [1, 2], "k": 10, "num_candidates": 10 } } + + - length: { hits.hits.0.highlight: 3 } + - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } + - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." 
} + +--- +"Test nested queries automatically disable weighted mode": + - requires: + cluster_features: "gte_v8.15.0" + reason: 'nested was not correctly skipped until 8.15' + - do: - indices.refresh: {} + search: + index: test + body: { + "query": { + "nested": { + "path": "nested", + "query": { + "multi_match": { + "query": "quick brown fox", + "type": "phrase", + "fields": [ "nested.text" ] + } + } + } + }, + "highlight": { "type": "unified", "fields": { "*": { } } } + } + + - length: { hits.hits.0.highlight: 1 } + - match: { hits.hits.0.highlight.nested\.text.0: "The quick brown fox is brown." } - do: search: - index: test-highlighting-knn + index: test body: { - "query": { "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } }, - "highlight": { "type": "unified", "fields": { "*": { } } }, - "knn": { "field": "vectors", "query_vector": [1, 2], "k": 10, "num_candidates": 10 } } + "query": { + "bool": { + "must_not": { + "nested": { + "path": "nested", + "query": { + "multi_match": { "query": "quick red fox", "type": "phrase", "fields": [ "nested.text" ] } + } + } + }, + "should": { + "multi_match": { "query": "quick brown fox", "type": "phrase", "fields": [ "text*" ] } + } + } + }, + "highlight": { "type": "unified", "fields": { "text*": { } } } + } + - length: { hits.hits.0.highlight: 3 } - match: { hits.hits.0.highlight.text.0: "The quick brown fox is brown." } - match: { hits.hits.0.highlight.text\.fvh.0: "The quick brown fox is brown." } - match: { hits.hits.0.highlight.text\.postings.0: "The quick brown fox is brown." 
} diff --git a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java index 5c1381f73001..c29e248b1a68 100644 --- a/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighter.java @@ -293,7 +293,8 @@ public QueryVisitor getSubVisitor(BooleanClause.Occur occur, Query parent) { if (parent instanceof ESToParentBlockJoinQuery) { hasUnknownLeaf[0] = true; } - return super.getSubVisitor(occur, parent); + // we want to visit all queries, including those within the must_not clauses. + return this; } }); return hasUnknownLeaf[0]; From bc37ecfbafefd7cb84976cd17a8129bd7c24afac Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Fri, 10 May 2024 09:48:37 +0100 Subject: [PATCH 040/119] Specify some parameters as always supported by capabilities (#108461) --- .../java/org/elasticsearch/rest/BaseRestHandler.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 70801cdef560..b142e4d567c0 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -76,13 +76,18 @@ public final long getUsageCount() { @Override public abstract List routes(); + private static final Set ALWAYS_SUPPORTED = Set.of("format", "filter_path", "pretty", "human"); + @Override public final void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { // check if the query has any parameters that are not in the supported set (if declared) Set supported = supportedQueryParameters(); - if (supported != null && supported.containsAll(request.params().keySet()) == 
false) { - Set unsupported = Sets.difference(request.params().keySet(), supported); - throw new IllegalArgumentException(unrecognized(request, unsupported, supported, "parameter")); + if (supported != null) { + var allSupported = Sets.union(ALWAYS_SUPPORTED, supported); + if (allSupported.containsAll(request.params().keySet()) == false) { + Set unsupported = Sets.difference(request.params().keySet(), allSupported); + throw new IllegalArgumentException(unrecognized(request, unsupported, allSupported, "parameter")); + } } // prepare the request for execution; has the side effect of touching the request parameters From 0eae05633684c6b2c974cd0272713bf52c4ab66d Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Fri, 10 May 2024 11:10:06 +0200 Subject: [PATCH 041/119] [Inference API] Add AzureOpenAiCompletionServiceSettings and AzureOpenAiCompletionTaskSettings to InferenceNamedWriteablesProvider (#108491) --- .../InferenceNamedWriteablesProvider.java | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index 8d01b25aa279..41bef3521cdf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -26,6 +26,8 @@ import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.inference.services.azureopenai.AzureOpenAiSecretSettings; +import org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionServiceSettings; +import 
org.elasticsearch.xpack.inference.services.azureopenai.completion.AzureOpenAiCompletionTaskSettings; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.azureopenai.embeddings.AzureOpenAiEmbeddingsTaskSettings; import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; @@ -237,6 +239,21 @@ public static List getNamedWriteables() { ) ); + namedWriteables.add( + new NamedWriteableRegistry.Entry( + ServiceSettings.class, + AzureOpenAiCompletionServiceSettings.NAME, + AzureOpenAiCompletionServiceSettings::new + ) + ); + namedWriteables.add( + new NamedWriteableRegistry.Entry( + TaskSettings.class, + AzureOpenAiCompletionTaskSettings.NAME, + AzureOpenAiCompletionTaskSettings::new + ) + ); + return namedWriteables; } } From 2541ce9c4d37191f43cfc0be3c9462adbb8dc1fb Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Fri, 10 May 2024 11:47:31 +0200 Subject: [PATCH 042/119] Log skipped prevoting as INFO (#108411) Relates ES-6576 --- .../org/elasticsearch/cluster/coordination/Coordinator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index 156ba88a7d2b..daff05f0fb19 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -1781,7 +1781,7 @@ public void run() { final var nodeEligibility = localNodeMayWinElection(lastAcceptedState, electionStrategy); if (nodeEligibility.mayWin() == false) { assert nodeEligibility.reason().isEmpty() == false; - logger.trace( + logger.info( "skip prevoting as local node may not win election ({}): {}", nodeEligibility.reason(), lastAcceptedState.coordinationMetadata() From 2e0f8d087c370c43d258c2e1ac4e5ac91a2a9c2d Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Fri, 10 May 2024 11:58:34 +0200 Subject: [PATCH 043/119] Add a SIMD (AVX2) optimised vector distance function for int7 on x64 (#108088) * Adding support for x64 to native vec library * Fix: aarch64 sqr7u dims * Fix: add symbol stripping (deb lintian) --------- Co-authored-by: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Co-authored-by: Elastic Machine --- docs/changelog/108088.yaml | 5 + libs/native/libraries/build.gradle | 2 +- .../nativeaccess/PosixNativeAccess.java | 10 +- .../VectorSimilarityFunctionsTests.java | 4 +- libs/vec/native/Dockerfile | 5 +- libs/vec/native/build.gradle | 76 +++++++-- libs/vec/native/publish_vec_binaries.sh | 16 +- libs/vec/native/src/vec/c/{ => aarch64}/vec.c | 2 +- libs/vec/native/src/vec/c/amd64/vec.c | 150 ++++++++++++++++++ libs/vec/native/src/vec/headers/vec.h | 2 +- .../vec/AbstractVectorTestCase.java | 4 +- 11 files changed, 254 insertions(+), 22 deletions(-) create mode 100644 docs/changelog/108088.yaml rename libs/vec/native/src/vec/c/{ => aarch64}/vec.c (99%) create mode 100644 libs/vec/native/src/vec/c/amd64/vec.c diff --git a/docs/changelog/108088.yaml b/docs/changelog/108088.yaml new file mode 100644 index 000000000000..95c58f6dc19f --- /dev/null +++ b/docs/changelog/108088.yaml @@ -0,0 +1,5 @@ +pr: 108088 +summary: Add a SIMD (AVX2) optimised vector distance function for int7 on x64 +area: "Search" +type: enhancement +issues: [] diff --git a/libs/native/libraries/build.gradle b/libs/native/libraries/build.gradle index 168eb533fea7..7a545787bbda 100644 --- a/libs/native/libraries/build.gradle +++ b/libs/native/libraries/build.gradle @@ -18,7 +18,7 @@ configurations { } var zstdVersion = "1.5.5" -var vecVersion = "1.0.6" +var vecVersion = "1.0.8" repositories { exclusiveContent { diff --git a/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java 
b/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java index 56017d3a8a20..c390cfc9289c 100644 --- a/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java +++ b/libs/native/src/main/java/org/elasticsearch/nativeaccess/PosixNativeAccess.java @@ -45,7 +45,15 @@ public Optional getVectorSimilarityFunctions() { } static boolean isNativeVectorLibSupported() { - return Runtime.version().feature() >= 21 && isMacOrLinuxAarch64() && checkEnableSystemProperty(); + return Runtime.version().feature() >= 21 && (isMacOrLinuxAarch64() || isLinuxAmd64()) && checkEnableSystemProperty(); + } + + /** + * Returns true iff the architecture is x64 (amd64) and the OS Linux (the OS we currently support for the native lib). + */ + static boolean isLinuxAmd64() { + String name = System.getProperty("os.name"); + return (name.startsWith("Linux")) && System.getProperty("os.arch").equals("amd64"); } /** Returns true iff the OS is Mac or Linux, and the architecture is aarch64. 
*/ diff --git a/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java b/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java index adf32874c04f..8c4cbb688abc 100644 --- a/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java +++ b/libs/native/src/test/java/org/elasticsearch/nativeaccess/VectorSimilarityFunctionsTests.java @@ -37,7 +37,9 @@ public boolean supported() { var arch = System.getProperty("os.arch"); var osName = System.getProperty("os.name"); - if (jdkVersion >= 21 && arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux"))) { + if (jdkVersion >= 21 + && ((arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux"))) + || (arch.equals("amd64") && osName.equals("Linux")))) { assertThat(vectorSimilarityFunctions, isPresent()); return true; } else { diff --git a/libs/vec/native/Dockerfile b/libs/vec/native/Dockerfile index 25dcf4d4854d..66eb7e92ef47 100644 --- a/libs/vec/native/Dockerfile +++ b/libs/vec/native/Dockerfile @@ -4,6 +4,7 @@ RUN apt update RUN apt install -y gcc g++ openjdk-17-jdk COPY . /workspace WORKDIR /workspace -RUN ./gradlew --quiet --console=plain clean vecSharedLibrary +RUN ./gradlew --quiet --console=plain clean buildSharedLibrary +RUN strip --strip-unneeded build/output/libvec.so -CMD cat build/libs/vec/shared/libvec.so +CMD cat build/output/libvec.so diff --git a/libs/vec/native/build.gradle b/libs/vec/native/build.gradle index 6a658da0644b..7edf46d40686 100644 --- a/libs/vec/native/build.gradle +++ b/libs/vec/native/build.gradle @@ -12,9 +12,10 @@ var os = org.gradle.internal.os.OperatingSystem.current() // To update this library run publish_vec_binaries.sh ( or ./gradlew vecSharedLibrary ) // Or // For local development, build the docker image with: -// docker build --platform linux/arm64 --progress=plain . +// docker build --platform linux/arm64 --progress=plain . 
(for aarch64) +// docker build --platform linux/amd64 --progress=plain . (for x64) // Grab the image id from the console output, then, e.g. -// docker run 9c9f36564c148b275aeecc42749e7b4580ded79dcf51ff6ccc008c8861e7a979 > build/libs/vec/shared/libvec.so +// docker run 9c9f36564c148b275aeecc42749e7b4580ded79dcf51ff6ccc008c8861e7a979 > build/libs/vec/shared/$arch/libvec.so // // To run tests and benchmarks on a locally built libvec, // 1. Temporarily comment out the download in libs/native/library/build.gradle @@ -30,26 +31,83 @@ var os = org.gradle.internal.os.OperatingSystem.current() group = 'org.elasticsearch' +def platformName = System.getProperty("os.arch"); + model { + platforms { + aarch64 { + architecture "aarch64" + } + amd64 { + architecture "x86-64" + } + } toolChains { gcc(Gcc) { target("aarch64") { cCompiler.executable = "/usr/bin/gcc" + cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=armv8-a"]) } + } + target("amd64") { + cCompiler.executable = "/usr/bin/gcc" + cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=core-avx2", "-Wno-incompatible-pointer-types"]) } } } - clang(Clang) - } - platforms { - aarch64 { - architecture "aarch64" + cl(VisualCpp) { + eachPlatform { toolchain -> + def platform = toolchain.getPlatform() + if (platform.name == "x64") { + cCompiler.withArguments { args -> args.addAll(["/O2", "/LD", "-march=core-avx2"]) } + } + } + } + clang(Clang) { + target("amd64") { + cCompiler.withArguments { args -> args.addAll(["-O3", "-std=c99", "-march=core-avx2"]) } + } } } components { vec(NativeLibrarySpec) { targetPlatform "aarch64" - binaries.withType(SharedLibraryBinarySpec) { - cCompiler.args "-O3", "-std=c99", "-march=armv8-a" + targetPlatform "amd64" + + sources { + c { + source { + srcDir "src/vec/c/${platformName}/" + include "*.c" + } + exportedHeaders { + srcDir "src/vec/headers/" + } + } + } + } + } +} + +tasks.register('buildSharedLibrary') { + description = 'Assembles native shared 
library for the host architecture' + if (platformName.equals("aarch64")) { + dependsOn tasks.vecAarch64SharedLibrary + doLast { + copy { + from tasks.linkVecAarch64SharedLibrary.outputs.files.files + into layout.buildDirectory.dir('output'); + duplicatesStrategy = 'INCLUDE' + } + } + } else if (platformName.equals("amd64")) { + dependsOn tasks.vecAmd64SharedLibrary + doLast { + copy { + from tasks.linkVecAmd64SharedLibrary.outputs.files.files + into layout.buildDirectory.dir('output'); + duplicatesStrategy = 'INCLUDE' } } + } else { + throw new GradleException("Unsupported platform: " + platformName) } } diff --git a/libs/vec/native/publish_vec_binaries.sh b/libs/vec/native/publish_vec_binaries.sh index e17690160e25..2ed6c750ab9e 100755 --- a/libs/vec/native/publish_vec_binaries.sh +++ b/libs/vec/native/publish_vec_binaries.sh @@ -19,7 +19,7 @@ if [ -z "$ARTIFACTORY_API_KEY" ]; then exit 1; fi -VERSION="1.0.6" +VERSION="1.0.8" ARTIFACTORY_REPOSITORY="${ARTIFACTORY_REPOSITORY:-https://artifactory.elastic.dev/artifactory/elasticsearch-native/}" TEMP=$(mktemp -d) @@ -29,16 +29,22 @@ if curl -sS -I --fail --location "${ARTIFACTORY_REPOSITORY}/org/elasticsearch/ve fi echo 'Building Darwin binary...' -./gradlew --quiet --console=plain vecSharedLibrary +./gradlew --quiet --console=plain vecAarch64SharedLibrary echo 'Building Linux binary...' DOCKER_IMAGE=$(docker build --platform linux/arm64 --quiet .) -docker run $DOCKER_IMAGE > build/libs/vec/shared/libvec.so +docker run $DOCKER_IMAGE > build/libs/vec/shared/aarch64/libvec.so + +echo 'Building Linux x64 binary...' +DOCKER_IMAGE=$(docker build --platform linux/amd64 --quiet .) 
+docker run --platform linux/amd64 $DOCKER_IMAGE > build/libs/vec/shared/amd64/libvec.so mkdir -p $TEMP/darwin-aarch64 mkdir -p $TEMP/linux-aarch64 -cp build/libs/vec/shared/libvec.dylib $TEMP/darwin-aarch64/ -cp build/libs/vec/shared/libvec.so $TEMP/linux-aarch64/ +mkdir -p $TEMP/linux-x64 +cp build/libs/vec/shared/aarch64/libvec.dylib $TEMP/darwin-aarch64/ +cp build/libs/vec/shared/aarch64/libvec.so $TEMP/linux-aarch64/ +cp build/libs/vec/shared/amd64/libvec.so $TEMP/linux-x64/ echo 'Uploading to Artifactory...' (cd $TEMP && zip -rq - .) | curl -sS -X PUT -H "X-JFrog-Art-Api: ${ARTIFACTORY_API_KEY}" --data-binary @- --location "${ARTIFACTORY_REPOSITORY}/org/elasticsearch/vec/${VERSION}/vec-${VERSION}.zip" diff --git a/libs/vec/native/src/vec/c/vec.c b/libs/vec/native/src/vec/c/aarch64/vec.c similarity index 99% rename from libs/vec/native/src/vec/c/vec.c rename to libs/vec/native/src/vec/c/aarch64/vec.c index 05dfe64a3be9..478e5e84d385 100644 --- a/libs/vec/native/src/vec/c/vec.c +++ b/libs/vec/native/src/vec/c/aarch64/vec.c @@ -121,7 +121,7 @@ static inline int32_t sqr7u_inner(int8_t *a, int8_t *b, size_t dims) { EXPORT int32_t sqr7u(int8_t* a, int8_t* b, size_t dims) { int32_t res = 0; int i = 0; - if (i > SQR7U_STRIDE_BYTES_LEN) { + if (dims > SQR7U_STRIDE_BYTES_LEN) { i += dims & ~(SQR7U_STRIDE_BYTES_LEN - 1); res = sqr7u_inner(a, b, i); } diff --git a/libs/vec/native/src/vec/c/amd64/vec.c b/libs/vec/native/src/vec/c/amd64/vec.c new file mode 100644 index 000000000000..c9a49ad2d1d4 --- /dev/null +++ b/libs/vec/native/src/vec/c/amd64/vec.c @@ -0,0 +1,150 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +#include +#include +#include "vec.h" + +#include +#include + +#ifndef DOT7U_STRIDE_BYTES_LEN +#define DOT7U_STRIDE_BYTES_LEN 32 // Must be a power of 2 +#endif + +#ifndef SQR7U_STRIDE_BYTES_LEN +#define SQR7U_STRIDE_BYTES_LEN 32 // Must be a power of 2 +#endif + +#ifdef _MSC_VER +#include +#elif __GNUC__ +#include +#elif __clang__ +#include +#endif + +// Multi-platform CPUID "intrinsic"; it takes as input a "functionNumber" (or "leaf", the eax registry). "Subleaf" +// is always 0. Output is stored in the passed output parameter: output[0] = eax, output[1] = ebx, output[2] = ecx, +// output[3] = edx +static inline void cpuid(int output[4], int functionNumber) { +#if defined(__GNUC__) || defined(__clang__) + // use inline assembly, Gnu/AT&T syntax + int a, b, c, d; + __asm("cpuid" : "=a"(a), "=b"(b), "=c"(c), "=d"(d) : "a"(functionNumber), "c"(0) : ); + output[0] = a; + output[1] = b; + output[2] = c; + output[3] = d; + +#elif defined (_MSC_VER) + __cpuidex(output, functionNumber, 0); +#else + #error Unsupported compiler +#endif +} + +// Utility function to horizontally add 8 32-bit integers +static inline int hsum_i32_8(const __m256i a) { + const __m128i sum128 = _mm_add_epi32(_mm256_castsi256_si128(a), _mm256_extractf128_si256(a, 1)); + const __m128i hi64 = _mm_unpackhi_epi64(sum128, sum128); + const __m128i sum64 = _mm_add_epi32(hi64, sum128); + const __m128i hi32 = _mm_shuffle_epi32(sum64, _MM_SHUFFLE(2, 3, 0, 1)); + return _mm_cvtsi128_si32(_mm_add_epi32(sum64, hi32)); +} + +EXPORT int vec_caps() { + int cpuInfo[4] = {-1}; + // Calling __cpuid with 0x0 as the function_id argument + // gets the number of the highest valid function ID. 
+ cpuid(cpuInfo, 0); + int functionIds = cpuInfo[0]; + if (functionIds >= 7) { + cpuid(cpuInfo, 7); + int ebx = cpuInfo[1]; + // AVX2 flag is the 5th bit + // We assume that all processors that have AVX2 also have FMA3 + return (ebx & (1 << 5)) != 0; + } + return 0; +} + +static inline int32_t dot7u_inner(int8_t* a, int8_t* b, size_t dims) { + const __m256i ones = _mm256_set1_epi16(1); + + // Init accumulator(s) with 0 + __m256i acc1 = _mm256_setzero_si256(); + +#pragma GCC unroll 4 + for(int i = 0; i < dims; i += DOT7U_STRIDE_BYTES_LEN) { + // Load packed 8-bit integers + __m256i va1 = _mm256_loadu_si256(a + i); + __m256i vb1 = _mm256_loadu_si256(b + i); + + // Perform multiplication and create 16-bit values + // Vertically multiply each unsigned 8-bit integer from va with the corresponding + // 8-bit integer from vb, producing intermediate signed 16-bit integers. + const __m256i vab = _mm256_maddubs_epi16(va1, vb1); + // Horizontally add adjacent pairs of intermediate signed 16-bit integers, and pack the results. 
+ acc1 = _mm256_add_epi32(_mm256_madd_epi16(ones, vab), acc1); + } + + // reduce (horizontally add all) + return hsum_i32_8(acc1); +} + +EXPORT int32_t dot7u(int8_t* a, int8_t* b, size_t dims) { + int32_t res = 0; + int i = 0; + if (dims > DOT7U_STRIDE_BYTES_LEN) { + i += dims & ~(DOT7U_STRIDE_BYTES_LEN - 1); + res = dot7u_inner(a, b, i); + } + for (; i < dims; i++) { + res += a[i] * b[i]; + } + return res; +} + +static inline int32_t sqr7u_inner(int8_t *a, int8_t *b, size_t dims) { + // Init accumulator(s) with 0 + __m256i acc1 = _mm256_setzero_si256(); + + const __m256i ones = _mm256_set1_epi16(1); + +#pragma GCC unroll 4 + for(int i = 0; i < dims; i += SQR7U_STRIDE_BYTES_LEN) { + // Load packed 8-bit integers + __m256i va1 = _mm256_loadu_si256(a + i); + __m256i vb1 = _mm256_loadu_si256(b + i); + + const __m256i dist1 = _mm256_sub_epi8(va1, vb1); + const __m256i abs_dist1 = _mm256_sign_epi8(dist1, dist1); + const __m256i sqr1 = _mm256_maddubs_epi16(abs_dist1, abs_dist1); + + acc1 = _mm256_add_epi32(_mm256_madd_epi16(ones, sqr1), acc1); + } + + // reduce (accumulate all) + return hsum_i32_8(acc1); +} + +EXPORT int32_t sqr7u(int8_t* a, int8_t* b, size_t dims) { + int32_t res = 0; + int i = 0; + if (dims > SQR7U_STRIDE_BYTES_LEN) { + i += dims & ~(SQR7U_STRIDE_BYTES_LEN - 1); + res = sqr7u_inner(a, b, i); + } + for (; i < dims; i++) { + int32_t dist = a[i] - b[i]; + res += dist * dist; + } + return res; +} + diff --git a/libs/vec/native/src/vec/headers/vec.h b/libs/vec/native/src/vec/headers/vec.h index 5d3806dfccbe..49fa29ec6fae 100644 --- a/libs/vec/native/src/vec/headers/vec.h +++ b/libs/vec/native/src/vec/headers/vec.h @@ -7,7 +7,7 @@ */ #ifdef _MSC_VER -#define EXPORT extern "C" __declspec(dllexport) +#define EXPORT __declspec(dllexport) #elif defined(__GNUC__) && !defined(__clang__) #define EXPORT __attribute__((externally_visible,visibility("default"))) #elif __clang__ diff --git a/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java 
b/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java index 771f665fb408..13f2d5a03ec7 100644 --- a/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java +++ b/libs/vec/src/test/java/org/elasticsearch/vec/AbstractVectorTestCase.java @@ -39,7 +39,9 @@ public static boolean supported() { var arch = System.getProperty("os.arch"); var osName = System.getProperty("os.name"); - if (jdkVersion >= 21 && arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux"))) { + if (jdkVersion >= 21 + && (arch.equals("aarch64") && (osName.startsWith("Mac") || osName.equals("Linux")) + || arch.equals("amd64") && osName.equals("Linux"))) { assertThat(factory, isPresent()); return true; } else { From d2d1357a334df228dd59878d844bf3870e1efc8b Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Fri, 10 May 2024 12:37:54 +0200 Subject: [PATCH 044/119] Expose capability checks for YAML REST tests (#108425) Co-authored-by: Simon Cooper --- .../rest-api-spec/api/capabilities.json | 47 ++++++++++ .../test/capabilities/10_basic.yml | 28 ++++++ .../SimpleNodesCapabilitiesIT.java | 10 +-- .../NodesCapabilitiesResponse.java | 10 ++- .../yaml/ClientYamlTestExecutionContext.java | 43 ++++++++- .../yaml/section/PrerequisiteSection.java | 90 ++++++++++++++++--- .../test/rest/yaml/section/Prerequisites.java | 20 ++++- .../section/PrerequisiteSectionTests.java | 83 ++++++++++++++++- 8 files changed, 307 insertions(+), 24 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/capabilities.json create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/capabilities.json b/rest-api-spec/src/main/resources/rest-api-spec/api/capabilities.json new file mode 100644 index 000000000000..28c341d9983c --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/capabilities.json @@ -0,0 +1,47 @@ +{ + "capabilities": 
{ + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/capabilities.html", + "description": "Checks if the specified combination of method, API, parameters, and arbitrary capabilities are supported" + }, + "stability": "experimental", + "visibility": "private", + "headers": { + "accept": [ + "application/json" + ] + }, + "url": { + "paths": [ + { + "path": "/_capabilities", + "methods": [ + "GET" + ] + } + ] + }, + "params": { + "method": { + "type": "enum", + "description": "REST method to check", + "options": [ + "GET", "HEAD", "POST", "PUT", "DELETE" + ], + "default": "GET" + }, + "path": { + "type": "string", + "description": "API path to check" + }, + "parameters": { + "type": "string", + "description": "Comma-separated list of API parameters to check" + }, + "capabilities": { + "type": "string", + "description": "Comma-separated list of arbitrary API capabilities to check" + } + } + } +} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml new file mode 100644 index 000000000000..715e696bd103 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml @@ -0,0 +1,28 @@ +--- +"Capabilities API": + + - requires: + capabilities: + - method: GET + path: /_capabilities + parameters: [method, path, parameters, capabilities] + capabilities: [] + reason: "capabilities api requires itself to be supported" + + - do: + capabilities: + method: GET + path: /_capabilities + parameters: method,path,parameters,capabilities + error_trace: false + + - match: { supported: true } + + - do: + capabilities: + method: GET + path: /_capabilities + parameters: unknown + error_trace: false + + - match: { supported: false } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java index 7e4ae040caec..9b60044c94f7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/nodescapabilities/SimpleNodesCapabilitiesIT.java @@ -15,8 +15,8 @@ import java.io.IOException; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.is; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) public class SimpleNodesCapabilitiesIT extends ESIntegTestCase { @@ -31,25 +31,25 @@ public void testNodesCapabilities() throws IOException { NodesCapabilitiesResponse response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities")) .actionGet(); assertThat(response.getNodes(), hasSize(2)); - assertThat(response.isSupported(), is(true)); + assertThat(response.isSupported(), isPresentWith(true)); // check we support some parameters of the capabilities API response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities").parameters("method", "path")) .actionGet(); assertThat(response.getNodes(), hasSize(2)); - assertThat(response.isSupported(), is(true)); + assertThat(response.isSupported(), isPresentWith(true)); // check we don't support some other parameters of the capabilities API response = clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_capabilities").parameters("method", "invalid")) .actionGet(); assertThat(response.getNodes(), hasSize(2)); - assertThat(response.isSupported(), is(false)); + assertThat(response.isSupported(), isPresentWith(false)); // check we don't support a random invalid api // TODO this is not working yet - see https://github.com/elastic/elasticsearch/issues/107425 /*response = 
clusterAdmin().nodesCapabilities(new NodesCapabilitiesRequest().path("_invalid")) .actionGet(); assertThat(response.getNodes(), hasSize(2)); - assertThat(response.isSupported(), is(false));*/ + assertThat(response.isSupported(), isPresentWith(false));*/ } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java index 63fdb9f7da08..c2acbf65f6e5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.List; +import java.util.Optional; public class NodesCapabilitiesResponse extends BaseNodesResponse implements ToXContentFragment { protected NodesCapabilitiesResponse(ClusterName clusterName, List nodes, List failures) { @@ -35,12 +36,15 @@ protected void writeNodesTo(StreamOutput out, List nodes) throws TransportAction.localOnly(); } - public boolean isSupported() { - return getNodes().isEmpty() == false && getNodes().stream().allMatch(NodeCapability::isSupported); + public Optional isSupported() { + // if there are any failures, we don't know if it is fully supported by all nodes in the cluster + if (hasFailures() || getNodes().isEmpty()) return Optional.empty(); + return Optional.of(getNodes().stream().allMatch(NodeCapability::isSupported)); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.field("supported", isSupported()); + Optional supported = isSupported(); + return builder.field("supported", supported.orElse(null)); } } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java 
b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java index 10bf2fb4b0a9..4954065369ad 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -16,7 +16,9 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; import org.elasticsearch.client.NodeSelector; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.test.rest.Stash; import org.elasticsearch.test.rest.TestFeatureService; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; @@ -25,14 +27,19 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.function.BiPredicate; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; + /** * Execution context passed across the REST tests. * Holds the REST client used to communicate with elasticsearch. 
@@ -122,7 +129,15 @@ public ClientYamlTestResponse callApi(
     ) throws IOException {
         // makes a copy of the parameters before modifying them for this specific request
         Map requestParams = new HashMap<>(params);
-        requestParams.putIfAbsent("error_trace", "true"); // By default ask for error traces, this my be overridden by params
+        requestParams.compute("error_trace", (k, v) -> {
+            if (v == null) {
+                return "true"; // By default ask for error traces, this may be overridden by params
+            } else if (v.equals("false")) {
+                return null;
+            } else {
+                return v;
+            }
+        });
         for (Map.Entry entry : requestParams.entrySet()) {
             if (stash.containsStashedValue(entry.getValue())) {
                 entry.setValue(stash.getValue(entry.getValue()).toString());
@@ -264,4 +279,30 @@ public ClientYamlTestCandidate getClientYamlTestCandidate() {
     public boolean clusterHasFeature(String featureId) {
         return testFeatureService.clusterHasFeature(featureId);
     }
+
+    public Optional clusterHasCapabilities(String method, String path, String parametersString, String capabilitiesString) {
+        Map params = Maps.newMapWithExpectedSize(5);
+        params.put("method", method);
+        params.put("path", path);
+        if (Strings.hasLength(parametersString)) {
+            params.put("parameters", parametersString);
+        }
+        if (Strings.hasLength(capabilitiesString)) {
+            params.put("capabilities", capabilitiesString);
+        }
+        params.put("error_trace", "false"); // disable error trace
+        try {
+            ClientYamlTestResponse resp = callApi("capabilities", params, emptyList(), emptyMap());
+            // anything other than 200 should result in an exception, handled below
+            assert resp.getStatusCode() == 200 : "Unknown response code " + resp.getStatusCode();
+            return Optional.ofNullable(resp.evaluate("supported"));
+        } catch (ClientYamlTestResponseException responseException) {
+            if (responseException.getRestTestResponse().getStatusCode() / 100 == 4) {
+                return Optional.empty(); // we don't know, the capabilities API is unsupported
+            }
+            throw new 
UncheckedIOException(responseException); + } catch (IOException ioException) { + throw new UncheckedIOException(ioException); + } + } } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java index 1ee447da1f11..c12de7e1155a 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSection.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -27,6 +28,7 @@ import java.util.function.Predicate; import static java.util.Collections.emptyList; +import static java.util.stream.Collectors.joining; /** * Represents a section where prerequisites to run a specific test section or suite are specified. 
It is possible to specify preconditions @@ -43,16 +45,23 @@ record KnownIssue(String clusterFeature, String fixedBy) { private static final Set FIELD_NAMES = Set.of("cluster_feature", "fixed_by"); } + record CapabilitiesCheck(String method, String path, String parameters, String capabilities) { + private static final Set FIELD_NAMES = Set.of("method", "path", "parameters", "capabilities"); + } + static class PrerequisiteSectionBuilder { - String skipVersionRange = null; String skipReason = null; - String requiresReason = null; - List requiredYamlRunnerFeatures = new ArrayList<>(); + String skipVersionRange = null; List skipOperatingSystems = new ArrayList<>(); List skipKnownIssues = new ArrayList<>(); String skipAwaitsFix = null; Set skipClusterFeatures = new HashSet<>(); + List skipCapabilities = new ArrayList<>(); + + String requiresReason = null; + List requiredYamlRunnerFeatures = new ArrayList<>(); Set requiredClusterFeatures = new HashSet<>(); + List requiredCapabilities = new ArrayList<>(); enum XPackRequired { NOT_SPECIFIED, @@ -116,11 +125,21 @@ public PrerequisiteSectionBuilder skipKnownIssue(KnownIssue knownIssue) { return this; } + public PrerequisiteSectionBuilder skipIfCapabilities(CapabilitiesCheck capabilitiesCheck) { + skipCapabilities.add(capabilitiesCheck); + return this; + } + public PrerequisiteSectionBuilder requireClusterFeature(String featureName) { requiredClusterFeatures.add(featureName); return this; } + public PrerequisiteSectionBuilder requireCapabilities(CapabilitiesCheck capabilitiesCheck) { + requiredCapabilities.add(capabilitiesCheck); + return this; + } + public PrerequisiteSectionBuilder skipIfOs(String osName) { this.skipOperatingSystems.add(osName); return this; @@ -128,13 +147,15 @@ public PrerequisiteSectionBuilder skipIfOs(String osName) { void validate(XContentLocation contentLocation) { if ((Strings.isEmpty(skipVersionRange)) - && requiredYamlRunnerFeatures.isEmpty() && skipOperatingSystems.isEmpty() - && xpackRequired == 
XPackRequired.NOT_SPECIFIED - && requiredClusterFeatures.isEmpty() && skipClusterFeatures.isEmpty() + && skipCapabilities.isEmpty() && skipKnownIssues.isEmpty() - && Strings.isEmpty(skipAwaitsFix)) { + && Strings.isEmpty(skipAwaitsFix) + && xpackRequired == XPackRequired.NOT_SPECIFIED + && requiredYamlRunnerFeatures.isEmpty() + && requiredCapabilities.isEmpty() + && requiredClusterFeatures.isEmpty()) { // TODO separate the validation for requires / skip when dropping parsing of legacy fields, e.g. features in skip throw new ParsingException(contentLocation, "at least one predicate is mandatory within a skip or requires section"); } @@ -143,11 +164,12 @@ void validate(XContentLocation contentLocation) { && (Strings.isEmpty(skipVersionRange) && skipOperatingSystems.isEmpty() && skipClusterFeatures.isEmpty() + && skipCapabilities.isEmpty() && skipKnownIssues.isEmpty()) == false) { throw new ParsingException(contentLocation, "reason is mandatory within this skip section"); } - if (Strings.isEmpty(requiresReason) && (requiredClusterFeatures.isEmpty() == false)) { + if (Strings.isEmpty(requiresReason) && ((requiredClusterFeatures.isEmpty() && requiredCapabilities.isEmpty()) == false)) { throw new ParsingException(contentLocation, "reason is mandatory within this requires section"); } @@ -190,6 +212,13 @@ public PrerequisiteSection build() { if (xpackRequired == XPackRequired.YES) { requiresCriteriaList.add(Prerequisites.hasXPack()); } + if (requiredClusterFeatures.isEmpty() == false) { + requiresCriteriaList.add(Prerequisites.requireClusterFeatures(requiredClusterFeatures)); + } + if (requiredCapabilities.isEmpty() == false) { + requiresCriteriaList.add(Prerequisites.requireCapabilities(requiredCapabilities)); + } + if (xpackRequired == XPackRequired.NO) { skipCriteriaList.add(Prerequisites.hasXPack()); } @@ -199,12 +228,12 @@ public PrerequisiteSection build() { if (skipOperatingSystems.isEmpty() == false) { 
skipCriteriaList.add(Prerequisites.skipOnOsList(skipOperatingSystems)); } - if (requiredClusterFeatures.isEmpty() == false) { - requiresCriteriaList.add(Prerequisites.requireClusterFeatures(requiredClusterFeatures)); - } if (skipClusterFeatures.isEmpty() == false) { skipCriteriaList.add(Prerequisites.skipOnClusterFeatures(skipClusterFeatures)); } + if (skipCapabilities.isEmpty() == false) { + skipCriteriaList.add(Prerequisites.skipCapabilities(skipCapabilities)); + } if (skipKnownIssues.isEmpty() == false) { skipCriteriaList.add(Prerequisites.skipOnKnownIssue(skipKnownIssues)); } @@ -287,6 +316,7 @@ static void parseSkipSection(XContentParser parser, PrerequisiteSectionBuilder b case "os" -> parseStrings(parser, builder::skipIfOs); case "cluster_features" -> parseStrings(parser, builder::skipIfClusterFeature); case "known_issues" -> parseArray(parser, PrerequisiteSection::parseKnownIssue, builder::skipKnownIssue); + case "capabilities" -> parseArray(parser, PrerequisiteSection::parseCapabilities, builder::skipIfCapabilities); default -> false; }; } @@ -337,12 +367,47 @@ private static KnownIssue parseKnownIssue(XContentParser parser) throws IOExcept if (fields.keySet().equals(KnownIssue.FIELD_NAMES) == false) { throw new ParsingException( parser.getTokenLocation(), - Strings.format("Expected fields %s, but got %s", KnownIssue.FIELD_NAMES, fields.keySet()) + Strings.format("Expected all of %s, but got %s", KnownIssue.FIELD_NAMES, fields.keySet()) ); } return new KnownIssue(fields.get("cluster_feature"), fields.get("fixed_by")); } + private static CapabilitiesCheck parseCapabilities(XContentParser parser) throws IOException { + Map fields = parser.map(); + if (CapabilitiesCheck.FIELD_NAMES.containsAll(fields.keySet()) == false) { + throw new ParsingException( + parser.getTokenLocation(), + Strings.format("Expected some of %s, but got %s", CapabilitiesCheck.FIELD_NAMES, fields.keySet()) + ); + } + Object path = fields.get("path"); + if (path == null) { + throw new 
ParsingException(parser.getTokenLocation(), "path is required"); + } + + return new CapabilitiesCheck( + ensureString(ensureString(fields.getOrDefault("method", "GET"))), + ensureString(path), + stringArrayAsParamString("parameters", fields), + stringArrayAsParamString("capabilities", fields) + ); + } + + private static String ensureString(Object obj) { + if (obj instanceof String str) return str; + throw new IllegalArgumentException("Expected STRING, but got: " + obj); + } + + private static String stringArrayAsParamString(String name, Map fields) { + Object value = fields.get(name); + if (value == null) return null; + if (value instanceof Collection values) { + return values.stream().map(PrerequisiteSection::ensureString).collect(joining(",")); + } + return ensureString(value); + } + static void parseRequiresSection(XContentParser parser, PrerequisiteSectionBuilder builder) throws IOException { requireStartObject("requires", parser.nextToken()); @@ -361,6 +426,7 @@ static void parseRequiresSection(XContentParser parser, PrerequisiteSectionBuild valid = switch (parser.currentName()) { case "test_runner_features" -> parseStrings(parser, f -> parseFeatureField(f, builder)); case "cluster_features" -> parseStrings(parser, builder::requireClusterFeature); + case "capabilities" -> parseArray(parser, PrerequisiteSection::parseCapabilities, builder::requireCapabilities); default -> false; }; } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java index ca10101a4612..86c035ebad62 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/Prerequisites.java @@ -10,8 +10,11 @@ import org.elasticsearch.test.rest.ESRestTestCase; import 
org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; +import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.CapabilitiesCheck; +import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.KnownIssue; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.function.Predicate; @@ -45,8 +48,23 @@ static Predicate skipOnClusterFeatures(Set clusterFeatures.stream().anyMatch(context::clusterHasFeature); } - static Predicate skipOnKnownIssue(List knownIssues) { + static Predicate skipOnKnownIssue(List knownIssues) { return context -> knownIssues.stream() .anyMatch(i -> context.clusterHasFeature(i.clusterFeature()) && context.clusterHasFeature(i.fixedBy()) == false); } + + static Predicate requireCapabilities(List checks) { + // requirement not fulfilled if unknown / capabilities API not supported + return context -> checks.stream().allMatch(check -> checkCapabilities(context, check).orElse(false)); + } + + static Predicate skipCapabilities(List checks) { + // skip if unknown / capabilities API not supported + return context -> checks.stream().anyMatch(check -> checkCapabilities(context, check).orElse(true)); + } + + private static Optional checkCapabilities(ClientYamlTestExecutionContext context, CapabilitiesCheck check) { + Optional b = context.clusterHasCapabilities(check.method(), check.path(), check.parameters(), check.capabilities()); + return b; + } } diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java index a77b2cc5b40f..0bb31ae2c574 100644 --- a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java +++ b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/PrerequisiteSectionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.core.Strings; import 
org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; +import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.CapabilitiesCheck; import org.elasticsearch.test.rest.yaml.section.PrerequisiteSection.KnownIssue; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.yaml.YamlXContent; @@ -20,8 +21,11 @@ import java.io.IOException; import java.util.List; +import java.util.Optional; import java.util.Set; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.contains; @@ -36,6 +40,8 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.oneOf; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -357,8 +363,8 @@ public void testParseSkipSectionIncompleteKnownIssues() throws Exception { e.getMessage(), is( oneOf( - ("Expected fields [cluster_feature, fixed_by], but got [cluster_feature]"), - ("Expected fields [fixed_by, cluster_feature], but got [cluster_feature]") + ("Expected all of [cluster_feature, fixed_by], but got [cluster_feature]"), + ("Expected all of [fixed_by, cluster_feature], but got [cluster_feature]") ) ) ); @@ -498,6 +504,42 @@ public void testParseRequireAndSkipSectionsClusterFeatures() throws Exception { assertThat(parser.nextToken(), nullValue()); } + public void testParseRequireAndSkipSectionsCapabilities() throws Exception { + parser = createParser(YamlXContent.yamlXContent, """ + - requires: + capabilities: + - path: /a + - method: POST + path: /b + parameters: [param1, param2] + - method: PUT + path: /c + capabilities: [a, b, c] + reason: required to run test + - skip: + capabilities: + 
- path: /d + parameters: param1 + capabilities: a + reason: undesired if supported + """); + + var skipSectionBuilder = PrerequisiteSection.parseInternal(parser); + assertThat(skipSectionBuilder, notNullValue()); + assertThat( + skipSectionBuilder.requiredCapabilities, + contains( + new CapabilitiesCheck("GET", "/a", null, null), + new CapabilitiesCheck("POST", "/b", "param1,param2", null), + new CapabilitiesCheck("PUT", "/c", null, "a,b,c") + ) + ); + assertThat(skipSectionBuilder.skipCapabilities, contains(new CapabilitiesCheck("GET", "/d", "param1", "a"))); + + assertThat(parser.currentToken(), equalTo(XContentParser.Token.END_ARRAY)); + assertThat(parser.nextToken(), nullValue()); + } + public void testParseRequireAndSkipSectionMultipleClusterFeatures() throws Exception { parser = createParser(YamlXContent.yamlXContent, """ - requires: @@ -659,6 +701,43 @@ public void testSkipKnownIssue() { assertFalse(section.skipCriteriaMet(mockContext)); } + public void testEvaluateCapabilities() { + List skipCapabilities = List.of( + new CapabilitiesCheck("GET", "/s", null, "c1,c2"), + new CapabilitiesCheck("GET", "/s", "p1,p2", "c1") + ); + List requiredCapabilities = List.of( + new CapabilitiesCheck("GET", "/r", null, null), + new CapabilitiesCheck("GET", "/r", "p1", null) + ); + PrerequisiteSection section = new PrerequisiteSection( + List.of(Prerequisites.skipCapabilities(skipCapabilities)), + "skip", + List.of(Prerequisites.requireCapabilities(requiredCapabilities)), + "required", + emptyList() + ); + + var context = mock(ClientYamlTestExecutionContext.class); + + // when the capabilities API is unavailable: + assertTrue(section.skipCriteriaMet(context)); // always skip if unavailable + assertFalse(section.requiresCriteriaMet(context)); // always fail requirements / skip if unavailable + + when(context.clusterHasCapabilities(anyString(), anyString(), any(), any())).thenReturn(Optional.of(FALSE)); + assertFalse(section.skipCriteriaMet(context)); + 
assertFalse(section.requiresCriteriaMet(context)); + + when(context.clusterHasCapabilities("GET", "/s", null, "c1,c2")).thenReturn(Optional.of(TRUE)); + assertTrue(section.skipCriteriaMet(context)); + + when(context.clusterHasCapabilities("GET", "/r", null, null)).thenReturn(Optional.of(TRUE)); + assertFalse(section.requiresCriteriaMet(context)); + + when(context.clusterHasCapabilities("GET", "/r", "p1", null)).thenReturn(Optional.of(TRUE)); + assertTrue(section.requiresCriteriaMet(context)); + } + public void evaluateEmpty() { var section = new PrerequisiteSection(List.of(), "unsupported", List.of(), "required", List.of()); From 7ed58e75dab2c36c99aabd78157be166c4ec322f Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Fri, 10 May 2024 13:35:00 +0200 Subject: [PATCH 045/119] Do not filter source if exclude contains `*` (#108501) This commit prevents the serialization of source if not needed. --- .../fetch/subphase/FetchSourcePhase.java | 11 +++++++--- .../search/lookup/SourceFilter.java | 4 ++++ .../fetch/subphase/FetchSourcePhaseTests.java | 21 +++++++++++++++++++ 3 files changed, 33 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java index 3b8e4e69d931..68e46186e450 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhase.java @@ -28,7 +28,7 @@ public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext) { } assert fetchSourceContext.fetchSource(); SourceFilter sourceFilter = fetchSourceContext.filter(); - + final boolean filterExcludesAll = sourceFilter.excludesAll(); return new FetchSubPhaseProcessor() { private int fastPath; @@ -67,8 +67,13 @@ private void hitExecute(FetchSourceContext fetchSourceContext, HitContext hitCon return; } - // Otherwise, filter the source and add 
it to the hit. - source = source.filter(sourceFilter); + if (filterExcludesAll) { + // we can just add an empty map + source = Source.empty(source.sourceContentType()); + } else { + // Otherwise, filter the source and add it to the hit. + source = source.filter(sourceFilter); + } if (nestedHit) { source = extractNested(source, hitContext.hit().getNestedIdentity()); } diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java b/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java index 3bf32159c167..ceffb32c08b4 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java @@ -109,4 +109,8 @@ private Function buildBytesFilter() { } }; } + + public boolean excludesAll() { + return Arrays.asList(excludes).contains("*"); + } } diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java index 3a4d67ae281f..2b8bf0dad65f 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java @@ -52,6 +52,27 @@ public void testBasicFiltering() throws IOException { assertEquals(Collections.singletonMap("field1", "value"), hitContext.hit().getSourceAsMap()); } + public void testExcludesAll() throws IOException { + XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field1", "value").field("field2", "value2").endObject(); + HitContext hitContext = hitExecute(source, false, null, null); + assertNull(hitContext.hit().getSourceAsMap()); + + hitContext = hitExecute(source, true, "field1", "*"); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + + hitContext = hitExecute(source, true, null, "*"); + assertEquals(Collections.emptyMap(), 
hitContext.hit().getSourceAsMap()); + + hitContext = hitExecute(source, true, "*", "*"); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + + hitContext = hitExecuteMultiple(source, true, new String[] { "field1", "field2" }, new String[] { "*", "field1" }); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + + hitContext = hitExecuteMultiple(source, true, null, new String[] { "field2", "*", "field1" }); + assertEquals(Collections.emptyMap(), hitContext.hit().getSourceAsMap()); + } + public void testMultipleFiltering() throws IOException { XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field", "value").field("field2", "value2").endObject(); HitContext hitContext = hitExecuteMultiple(source, true, new String[] { "*.notexisting", "field" }, null); From fed808850d708ba4be5190ac2abc3c47d8d2d379 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Fri, 10 May 2024 14:28:19 +0200 Subject: [PATCH 046/119] ES|QL: Add unit tests for now() function (#108498) --- .../functions/date-time-functions.asciidoc | 2 +- .../esql/functions/description/now.asciidoc | 5 ++ .../esql/functions/examples/now.asciidoc | 22 ++++++ .../esql/functions/kibana/definition/now.json | 16 +++++ .../esql/functions/kibana/docs/now.md | 10 +++ .../esql/functions/layout/now.asciidoc | 15 ++++ docs/reference/esql/functions/now.asciidoc | 28 -------- .../esql/functions/parameters/now.asciidoc | 3 + .../esql/functions/signature/now.svg | 1 + .../esql/functions/types/now.asciidoc | 9 +++ .../function/scalar/math/NowTests.java | 68 +++++++++++++++++++ 11 files changed, 150 insertions(+), 29 deletions(-) create mode 100644 docs/reference/esql/functions/description/now.asciidoc create mode 100644 docs/reference/esql/functions/examples/now.asciidoc create mode 100644 docs/reference/esql/functions/kibana/definition/now.json create mode 100644 docs/reference/esql/functions/kibana/docs/now.md create mode 100644 
docs/reference/esql/functions/layout/now.asciidoc delete mode 100644 docs/reference/esql/functions/now.asciidoc create mode 100644 docs/reference/esql/functions/parameters/now.asciidoc create mode 100644 docs/reference/esql/functions/signature/now.svg create mode 100644 docs/reference/esql/functions/types/now.asciidoc create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java diff --git a/docs/reference/esql/functions/date-time-functions.asciidoc b/docs/reference/esql/functions/date-time-functions.asciidoc index 8ce26eaabe38..eceb6378426a 100644 --- a/docs/reference/esql/functions/date-time-functions.asciidoc +++ b/docs/reference/esql/functions/date-time-functions.asciidoc @@ -21,4 +21,4 @@ include::layout/date_extract.asciidoc[] include::layout/date_format.asciidoc[] include::layout/date_parse.asciidoc[] include::layout/date_trunc.asciidoc[] -include::now.asciidoc[] +include::layout/now.asciidoc[] diff --git a/docs/reference/esql/functions/description/now.asciidoc b/docs/reference/esql/functions/description/now.asciidoc new file mode 100644 index 000000000000..4852c98b4980 --- /dev/null +++ b/docs/reference/esql/functions/description/now.asciidoc @@ -0,0 +1,5 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Description* + +Returns current date and time. diff --git a/docs/reference/esql/functions/examples/now.asciidoc b/docs/reference/esql/functions/examples/now.asciidoc new file mode 100644 index 000000000000..b8953de93724 --- /dev/null +++ b/docs/reference/esql/functions/examples/now.asciidoc @@ -0,0 +1,22 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsNow] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsNow-result] +|=== +To retrieve logs from the last hour: +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsNowWhere] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsNowWhere-result] +|=== + diff --git a/docs/reference/esql/functions/kibana/definition/now.json b/docs/reference/esql/functions/kibana/definition/now.json new file mode 100644 index 000000000000..9cdb4945afa2 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/now.json @@ -0,0 +1,16 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "eval", + "name" : "now", + "description" : "Returns current date and time.", + "signatures" : [ + { + "params" : [ ], + "returnType" : "datetime" + } + ], + "examples" : [ + "ROW current_date = NOW()", + "FROM sample_data\n| WHERE @timestamp > NOW() - 1 hour" + ] +} diff --git a/docs/reference/esql/functions/kibana/docs/now.md b/docs/reference/esql/functions/kibana/docs/now.md new file mode 100644 index 000000000000..5143dc843ebd --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/now.md @@ -0,0 +1,10 @@ + + +### NOW +Returns current date and time. + +``` +ROW current_date = NOW() +``` diff --git a/docs/reference/esql/functions/layout/now.asciidoc b/docs/reference/esql/functions/layout/now.asciidoc new file mode 100644 index 000000000000..52341c166561 --- /dev/null +++ b/docs/reference/esql/functions/layout/now.asciidoc @@ -0,0 +1,15 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +[discrete] +[[esql-now]] +=== `NOW` + +*Syntax* + +[.text-center] +image::esql/functions/signature/now.svg[Embedded,opts=inline] + +include::../parameters/now.asciidoc[] +include::../description/now.asciidoc[] +include::../types/now.asciidoc[] +include::../examples/now.asciidoc[] diff --git a/docs/reference/esql/functions/now.asciidoc b/docs/reference/esql/functions/now.asciidoc deleted file mode 100644 index 3c46f557acd1..000000000000 --- a/docs/reference/esql/functions/now.asciidoc +++ /dev/null @@ -1,28 +0,0 @@ -[discrete] -[[esql-now]] -=== `NOW` - -*Syntax* - -[source,esql] ----- -NOW() ----- - -*Description* - -Returns current date and time. - -*Example* - -[source,esql] ----- -include::{esql-specs}/date.csv-spec[tag=docsNow] ----- - -To retrieve logs from the last hour: - -[source,esql] ----- -include::{esql-specs}/date.csv-spec[tag=docsNowWhere] ----- \ No newline at end of file diff --git a/docs/reference/esql/functions/parameters/now.asciidoc b/docs/reference/esql/functions/parameters/now.asciidoc new file mode 100644 index 000000000000..25b3c973f1a2 --- /dev/null +++ b/docs/reference/esql/functions/parameters/now.asciidoc @@ -0,0 +1,3 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Parameters* diff --git a/docs/reference/esql/functions/signature/now.svg b/docs/reference/esql/functions/signature/now.svg new file mode 100644 index 000000000000..2cd48ac56140 --- /dev/null +++ b/docs/reference/esql/functions/signature/now.svg @@ -0,0 +1 @@ +NOW() \ No newline at end of file diff --git a/docs/reference/esql/functions/types/now.asciidoc b/docs/reference/esql/functions/types/now.asciidoc new file mode 100644 index 000000000000..5737d98f2f7d --- /dev/null +++ b/docs/reference/esql/functions/types/now.asciidoc @@ -0,0 +1,9 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +result +datetime +|=== diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java new file mode 100644 index 000000000000..b4f195c5929e --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/NowTests.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractConfigurationFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.scalar.date.Now; +import org.elasticsearch.xpack.esql.session.EsqlConfiguration; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.function.Supplier; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.matchesPattern; + +public class NowTests extends AbstractConfigurationFunctionTestCase { + public NowTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = 
testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + return parameterSuppliersFromTypedData( + List.of( + new TestCaseSupplier( + "Now Test", + () -> new TestCaseSupplier.TestCase( + List.of(), + matchesPattern("LiteralsEvaluator\\[lit=.*\\]"), + DataTypes.DATETIME, + equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli()) + ) + ) + ) + ); + } + + @Override + protected Expression buildWithConfiguration(Source source, List args, EsqlConfiguration configuration) { + return new Now(Source.EMPTY, configuration); + } + + @Override + protected void assertSimpleWithNulls(List data, Block value, int nullBlock) { + assertThat(((LongBlock) value).asVector().getLong(0), equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli())); + } + + @Override + protected Matcher allNullsMatcher() { + return equalTo(EsqlTestUtils.TEST_CFG.now().toInstant().toEpochMilli()); + } + +} From ac102e53f3d5eb318e682101b2060cba7ae90936 Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Fri, 10 May 2024 08:33:28 -0400 Subject: [PATCH 047/119] Improve join NotMasterException response, and add class documentation (#108107) The NotMasterException response to a join request is difficult to use to diagnose a failed join attempt. Enhancing the NotMasterException to include what node is thought to be master and the current term. This additional information will help readers locate the real master, to go look at those logs. The additional class documentation on JoinHelper and ClusterFormationFailureHelper should improve comprehension of the circumstances of error message logs. 
--- .../ClusterFormationFailureHelper.java | 23 ++++++++++++++- .../cluster/coordination/JoinHelper.java | 29 +++++++++++++++++-- .../coordination/NodeJoinExecutor.java | 10 ++++++- 3 files changed, 58 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java index c2cd40383659..e81d8d73af9a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java @@ -43,9 +43,16 @@ import static org.elasticsearch.cluster.coordination.ClusterBootstrapService.INITIAL_MASTER_NODES_SETTING; import static org.elasticsearch.monitor.StatusInfo.Status.UNHEALTHY; +/** + * Handles periodic debug logging of information regarding why the cluster has failed to form. + * Periodic logging begins once {@link #start()} is called, and ceases on {@link #stop()}. + */ public class ClusterFormationFailureHelper { private static final Logger logger = LogManager.getLogger(ClusterFormationFailureHelper.class); + /** + * This time period controls how often warning log messages will be written if this node fails to join or form a cluster. + */ public static final Setting DISCOVERY_CLUSTER_FORMATION_WARNING_TIMEOUT_SETTING = Setting.timeSetting( "discovery.cluster_formation_warning_timeout", TimeValue.timeValueMillis(10000), @@ -61,6 +68,16 @@ public class ClusterFormationFailureHelper { @Nullable // if no warning is scheduled private volatile WarningScheduler warningScheduler; + /** + * Works with the {@link JoinHelper} to log the latest node-join attempt failure and cluster state debug information. Must call + * {@link ClusterFormationState#start()} to begin. + * + * @param settings provides the period in which to log cluster formation errors. 
+ * @param clusterFormationStateSupplier information about the current believed cluster state (See {@link ClusterFormationState}) + * @param threadPool the thread pool on which to run debug logging + * @param logLastFailedJoinAttempt invokes an instance of the JoinHelper to log the last encountered join failure + * (See {@link JoinHelper#logLastFailedJoinAttempt()}) + */ public ClusterFormationFailureHelper( Settings settings, Supplier clusterFormationStateSupplier, @@ -78,6 +95,10 @@ public boolean isRunning() { return warningScheduler != null; } + /** + * Schedules a warning debug message to be logged in 'clusterFormationWarningTimeout' time, and periodically thereafter, until + * {@link ClusterFormationState#stop()} has been called. + */ public void start() { assert warningScheduler == null; warningScheduler = new WarningScheduler(); @@ -129,7 +150,7 @@ public String toString() { } /** - * If this node believes that cluster formation has failed, this record provides information that can be used to determine why that is. + * This record provides node state information that can be used to determine why cluster formation has failed. */ public record ClusterFormationState( List initialMasterNodesSetting, diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java index b960bb02ceb7..059400ad81cf 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java @@ -194,13 +194,23 @@ private void unregisterAndReleaseConnection(DiscoveryNode destination, Releasabl Releasables.close(connectionReference); } - // package-private for testing + /** + * Saves information about a join failure. The failure information may be logged later via either {@link FailedJoinAttempt#logNow} + * or {@link FailedJoinAttempt#lastFailedJoinAttempt}. 
+ * + * Package-private for testing. + */ static class FailedJoinAttempt { private final DiscoveryNode destination; private final JoinRequest joinRequest; private final ElasticsearchException exception; private final long timestamp; + /** + * @param destination the master node targeted by the join request. + * @param joinRequest the join request that was sent to the perceived master node. + * @param exception the error response received in reply to the join request attempt. + */ FailedJoinAttempt(DiscoveryNode destination, JoinRequest joinRequest, ElasticsearchException exception) { this.destination = destination; this.joinRequest = joinRequest; @@ -208,10 +218,18 @@ static class FailedJoinAttempt { this.timestamp = System.nanoTime(); } + /** + * Logs the failed join attempt exception. + * {@link FailedJoinAttempt#getLogLevel(ElasticsearchException)} determines at what log-level the log is written. + */ void logNow() { logger.log(getLogLevel(exception), () -> format("failed to join %s with %s", destination, joinRequest), exception); } + /** + * Returns the appropriate log level based on the given exception. Every error is at least DEBUG, but unexpected errors are INFO. + * For example, NotMasterException and CircuitBreakingExceptions are DEBUG logs. + */ static Level getLogLevel(ElasticsearchException e) { Throwable cause = e.unwrapCause(); if (cause instanceof CoordinationStateRejectedException @@ -226,6 +244,10 @@ void logWarnWithTimestamp() { logger.warn( () -> format( "last failed join attempt was %s ago, failed to join %s with %s", + // 'timestamp' is when this error exception was received by the local node. 
If the time that has passed since the error + // was originally received is quite large, it could indicate that this is a stale error exception from some prior + // out-of-order request response (where a later sent request but earlier received response was successful); or + // alternatively an old error could indicate that this node did not retry the join request for a very long time. TimeValue.timeValueMillis(TimeValue.nsecToMSec(System.nanoTime() - timestamp)), destination, joinRequest @@ -235,6 +257,9 @@ void logWarnWithTimestamp() { } } + /** + * Logs a warning message if {@link #lastFailedJoinAttempt} has been set with a failure. + */ void logLastFailedJoinAttempt() { FailedJoinAttempt attempt = lastFailedJoinAttempt.get(); if (attempt != null) { @@ -247,7 +272,7 @@ public void sendJoinRequest(DiscoveryNode destination, long term, Optional assert destination.isMasterNode() : "trying to join master-ineligible " + destination; final StatusInfo statusInfo = nodeHealthService.getHealth(); if (statusInfo.getStatus() == UNHEALTHY) { - logger.debug("dropping join request to [{}]: [{}]", destination, statusInfo.getInfo()); + logger.debug("dropping join request to [{}], unhealthy status: [{}]", destination, statusInfo.getInfo()); return; } final JoinRequest joinRequest = new JoinRequest( diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java index 2c024063e239..9223e02fc946 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; import org.elasticsearch.features.FeatureService; import 
org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -123,7 +124,14 @@ public ClusterState execute(BatchExecutionContext batchExecutionContex newState = ClusterState.builder(initialState); } else { logger.trace("processing node joins, but we are not the master. current master: {}", currentNodes.getMasterNode()); - throw new NotMasterException("Node [" + currentNodes.getLocalNode() + "] not master for join request"); + throw new NotMasterException( + Strings.format( + "Node [%s] not master for join request. Current known master [%s], current term [%d]", + currentNodes.getLocalNode(), + currentNodes.getMasterNode(), + term + ) + ); } DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(newState.nodes()); From 79032ec77eb5227bdb6eef37df7a4a6d35d98912 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Fri, 10 May 2024 14:46:33 +0200 Subject: [PATCH 048/119] Do not use global ordinals strategy if the leaf reader context cannot be obtained (#108459) --- docs/changelog/108459.yaml | 6 ++++++ .../FrequentItemSetCollector.java | 12 +++++++----- .../mr/ItemSetMapReduceAggregator.java | 18 ++++++++---------- .../mr/ItemSetMapReduceValueSource.java | 12 +++++++----- 4 files changed, 28 insertions(+), 20 deletions(-) create mode 100644 docs/changelog/108459.yaml diff --git a/docs/changelog/108459.yaml b/docs/changelog/108459.yaml new file mode 100644 index 000000000000..5e05797f284b --- /dev/null +++ b/docs/changelog/108459.yaml @@ -0,0 +1,6 @@ +pr: 108459 +summary: Do not use global ordinals strategy if the leaf reader context cannot be + obtained +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java index 18086748d6fe..bd80e362f2f7 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetCollector.java @@ -177,7 +177,8 @@ FrequentItemSet toFrequentItemSet(List fields) throws IOException { int pos = items.nextSetBit(0); while (pos > 0) { Tuple item = transactionStore.getItem(topItemIds.getItemIdAt(pos - 1)); - assert item.v1() < fields.size() : "item id exceed number of given items, did you configure eclat correctly?"; + assert item.v1() < fields.size() + : "eclat error: item id (" + item.v1() + ") exceeds the number of given items (" + fields.size() + ")"; final Field field = fields.get(item.v1()); Object formattedValue = field.formatValue(item.v2()); String fieldName = fields.get(item.v1()).getName(); @@ -252,19 +253,20 @@ public FrequentItemSetCollector(TransactionStore transactionStore, TopItemIds to this.topItemIds = topItemIds; this.size = size; this.min = min; - queue = new FrequentItemSetPriorityQueue(size); - frequentItemsByCount = Maps.newMapWithExpectedSize(size / 10); + this.queue = new FrequentItemSetPriorityQueue(size); + this.frequentItemsByCount = Maps.newMapWithExpectedSize(size / 10); } public FrequentItemSet[] finalizeAndGetResults(List fields) throws IOException { - FrequentItemSet[] topFrequentItems = new FrequentItemSet[size()]; + FrequentItemSet[] topFrequentItems = new FrequentItemSet[queue.size()]; for (int i = topFrequentItems.length - 1; i >= 0; i--) { topFrequentItems[i] = queue.pop().toFrequentItemSet(fields); } return topFrequentItems; } - public int size() { + // Visible for testing + int size() { return queue.size(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java index 72bfb6f1f039..0f9555c77341 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceAggregator.java @@ -86,17 +86,15 @@ protected ItemSetMapReduceAggregator( boolean rewriteBasedOnOrdinals = false; - if (ctx.isPresent()) { - for (var c : configsAndValueFilters) { - ItemSetMapReduceValueSource e = context.getValuesSourceRegistry() - .getAggregator(registryKey, c.v1()) - .build(c.v1(), id++, c.v2(), ordinalOptimization, ctx.get()); - if (e.getField().getName() != null) { - fields.add(e.getField()); - valueSources.add(e); - } - rewriteBasedOnOrdinals |= e.usesOrdinals(); + for (var c : configsAndValueFilters) { + ItemSetMapReduceValueSource e = context.getValuesSourceRegistry() + .getAggregator(registryKey, c.v1()) + .build(c.v1(), id++, c.v2(), ordinalOptimization, ctx); + if (e.getField().getName() != null) { + fields.add(e.getField()); + valueSources.add(e); } + rewriteBasedOnOrdinals |= e.usesOrdinals(); } this.rewriteBasedOnOrdinals = rewriteBasedOnOrdinals; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java index c9ec772eb332..08adecd3fbce 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java @@ -37,6 +37,7 @@ import java.util.Collections; import java.util.List; import java.util.Objects; +import java.util.Optional; /** * Interface to extract values from Lucene in order to feed it into the MapReducer. 
@@ -53,7 +54,7 @@ ItemSetMapReduceValueSource build( int id, IncludeExclude includeExclude, AbstractItemSetMapReducer.OrdinalOptimization ordinalOptimization, - LeafReaderContext ctx + Optional ctx ) throws IOException; } @@ -345,20 +346,21 @@ public KeywordValueSource( int id, IncludeExclude includeExclude, AbstractItemSetMapReducer.OrdinalOptimization ordinalOptimization, - LeafReaderContext ctx + Optional ctx ) throws IOException { super(config, id, ValueFormatter.BYTES_REF); if (AbstractItemSetMapReducer.OrdinalOptimization.GLOBAL_ORDINALS.equals(ordinalOptimization) && config.getValuesSource() instanceof Bytes.WithOrdinals - && ((Bytes.WithOrdinals) config.getValuesSource()).supportsGlobalOrdinalsMapping()) { + && ((Bytes.WithOrdinals) config.getValuesSource()).supportsGlobalOrdinalsMapping() + && ctx.isPresent()) { logger.debug("Use ordinals for field [{}]", config.fieldContext().field()); this.executionStrategy = new GlobalOrdinalsStrategy( getField(), (Bytes.WithOrdinals) config.getValuesSource(), includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(config.format()), - ctx + ctx.get() ); } else { this.executionStrategy = new MapStrategy( @@ -394,7 +396,7 @@ public NumericValueSource( int id, IncludeExclude includeExclude, AbstractItemSetMapReducer.OrdinalOptimization unusedOrdinalOptimization, - LeafReaderContext unusedCtx + Optional unusedCtx ) { super(config, id, ValueFormatter.LONG); this.source = (Numeric) config.getValuesSource(); From 8d19849dc10b28244b506c131cfe9db6e6c4372d Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 10 May 2024 15:10:07 +0200 Subject: [PATCH 049/119] Fix potential leaks in search execution (#108391) Cleaning up some potentially leaky spots or at the very least making them easier to read. 
--- .../action/search/TransportSearchAction.java | 58 +++++++++++-------- .../search/internal/SearchContext.java | 1 + 2 files changed, 34 insertions(+), 25 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 51a8c6ddb3d7..a12d149bbe34 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -1303,8 +1303,8 @@ public SearchPhase newSearchPhase( task, true, searchService.getCoordinatorRewriteContextProvider(timeProvider::absoluteStartMillis), - listener.delegateFailureAndWrap((l, iters) -> { - SearchPhase action = newSearchPhase( + listener.delegateFailureAndWrap( + (l, iters) -> newSearchPhase( task, searchRequest, executor, @@ -1317,30 +1317,32 @@ public SearchPhase newSearchPhase( false, threadPool, clusters - ); - action.start(); - }) - ); - } else { - // for synchronous CCS minimize_roundtrips=false, use the CCSSingleCoordinatorSearchProgressListener - // (AsyncSearchTask will not return SearchProgressListener.NOOP, since it uses its own progress listener - // which delegates to CCSSingleCoordinatorSearchProgressListener when minimizing roundtrips) - if (clusters.isCcsMinimizeRoundtrips() == false - && clusters.hasRemoteClusters() - && task.getProgressListener() == SearchProgressListener.NOOP) { - task.setProgressListener(new CCSSingleCoordinatorSearchProgressListener()); - } - final SearchPhaseResults queryResultConsumer = searchPhaseController.newSearchPhaseResults( - executor, - circuitBreaker, - task::isCancelled, - task.getProgressListener(), - searchRequest, - shardIterators.size(), - exc -> searchTransportService.cancelSearchTask(task, "failed to merge result [" + exc.getMessage() + "]") + ).start() + ) ); + } + // for synchronous CCS minimize_roundtrips=false, use the 
CCSSingleCoordinatorSearchProgressListener + // (AsyncSearchTask will not return SearchProgressListener.NOOP, since it uses its own progress listener + // which delegates to CCSSingleCoordinatorSearchProgressListener when minimizing roundtrips) + if (clusters.isCcsMinimizeRoundtrips() == false + && clusters.hasRemoteClusters() + && task.getProgressListener() == SearchProgressListener.NOOP) { + task.setProgressListener(new CCSSingleCoordinatorSearchProgressListener()); + } + final SearchPhaseResults queryResultConsumer = searchPhaseController.newSearchPhaseResults( + executor, + circuitBreaker, + task::isCancelled, + task.getProgressListener(), + searchRequest, + shardIterators.size(), + exc -> searchTransportService.cancelSearchTask(task, "failed to merge result [" + exc.getMessage() + "]") + ); + boolean success = false; + try { + final SearchPhase searchPhase; if (searchRequest.searchType() == DFS_QUERY_THEN_FETCH) { - return new SearchDfsQueryThenFetchAsyncAction( + searchPhase = new SearchDfsQueryThenFetchAsyncAction( logger, namedWriteableRegistry, searchTransportService, @@ -1359,7 +1361,7 @@ public SearchPhase newSearchPhase( ); } else { assert searchRequest.searchType() == QUERY_THEN_FETCH : searchRequest.searchType(); - return new SearchQueryThenFetchAsyncAction( + searchPhase = new SearchQueryThenFetchAsyncAction( logger, namedWriteableRegistry, searchTransportService, @@ -1377,6 +1379,12 @@ public SearchPhase newSearchPhase( clusters ); } + success = true; + return searchPhase; + } finally { + if (success == false) { + queryResultConsumer.close(); + } } } } diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 232c12e944a9..35f96ee2dc10 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -351,6 +351,7 @@ public Query 
rewrittenQuery() { * Adds a releasable that will be freed when this context is closed. */ public void addReleasable(Releasable releasable) { // TODO most Releasables are managed by their callers. We probably don't need this. + assert closed.get() == false; releasables.add(releasable); } From 7e38ee13d593984406c9192c5ee31b6b351e99e7 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Fri, 10 May 2024 15:07:01 +0100 Subject: [PATCH 050/119] Mute capabilities yaml test (#108511) --- .../resources/rest-api-spec/test/capabilities/10_basic.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml index 715e696bd103..04d60e31149e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml @@ -1,6 +1,9 @@ --- "Capabilities API": + - skip: + awaits_fix: "https://github.com/elastic/elasticsearch/issues/108509" + - requires: capabilities: - method: GET From b6874a516076044c7937ed807270e320fcfc925a Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Fri, 10 May 2024 10:36:16 -0400 Subject: [PATCH 051/119] [Transform] Use unpooled SearchHits in tests (#108508) Fix #108507 --- .../transforms/TransformIndexerFailureHandlingTests.java | 2 +- .../xpack/transform/transforms/TransformIndexerStateTests.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java index f39a4329f2bb..ceb2507ad33a 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java 
+++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java @@ -992,7 +992,7 @@ private MockedTransformIndexer createMockIndexer( private static Function returnHit() { return request -> new SearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled(new SearchHit[] { SearchHit.unpooled(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), // Simulate completely null aggs null, new Suggest(Collections.emptyList()), diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java index a474976cf9df..01a2db839b7d 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java @@ -88,7 +88,7 @@ public class TransformIndexerStateTests extends ESTestCase { private static final SearchResponse ONE_HIT_SEARCH_RESPONSE = new SearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + SearchHits.unpooled(new SearchHit[] { SearchHit.unpooled(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), // Simulate completely null aggs null, new Suggest(Collections.emptyList()), From d6cb12ec8fc377b5ed50681304bf2cd33f43e62f Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Fri, 10 May 2024 16:29:01 +0100 Subject: [PATCH 052/119] Properly support capabilities checks with mixed version clusters (#108513) When a capabilities check is done on a cluster with some nodes that do not support capabilities, always return false --- .../upgrades/NodesCapabilitiesUpgradeIT.java | 68 +++++++++++++++++++ 
.../test/capabilities/10_basic.yml | 3 - .../NodesCapabilitiesResponse.java | 9 ++- .../TransportNodesCapabilitiesAction.java | 25 ++++++- .../org/elasticsearch/rest/RestFeatures.java | 7 ++ .../cluster/RestNodesCapabilitiesAction.java | 3 + .../test/rest/ESRestTestCase.java | 38 +++++++++++ 7 files changed, 147 insertions(+), 6 deletions(-) create mode 100644 qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NodesCapabilitiesUpgradeIT.java diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NodesCapabilitiesUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NodesCapabilitiesUpgradeIT.java new file mode 100644 index 000000000000..2acaf33c2130 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/NodesCapabilitiesUpgradeIT.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.core.UpdateForV9; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresentWith; + +@UpdateForV9 +public class NodesCapabilitiesUpgradeIT extends AbstractRollingUpgradeTestCase { + + private static Boolean upgradingBeforeCapabilities; + + public NodesCapabilitiesUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + @Before + public void checkBeforeHasNoCapabilities() throws IOException { + if (upgradingBeforeCapabilities == null) { + // try to do a _capabilities query on a node before we upgrade + try { + clusterHasCapability("GET", "_capabilities", List.of(), List.of()); + upgradingBeforeCapabilities = false; + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() == 400) { + upgradingBeforeCapabilities = true; + } else { + throw e; + } + } + } + + assumeTrue("Only valid when upgrading from versions without capabilities API", upgradingBeforeCapabilities); + } + + public void testCapabilitiesReturnsFalsePartiallyUpgraded() throws IOException { + if (isMixedCluster()) { + // capabilities checks should either fail (if talking to an old node), + // or return false as not all nodes have the API (if talking to a new node) + try { + assertThat( + "Upgraded node should report no capabilities supported", + clusterHasCapability("GET", "_capabilities", List.of(), List.of()), + isPresentWith(false) + ); + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() != 400) { + // throw explicitly to capture exception too + throw new AssertionError("Old node should not have the capabilities API", e); + } + } + } + } +} diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml index 04d60e31149e..715e696bd103 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/capabilities/10_basic.yml @@ -1,9 +1,6 @@ --- "Capabilities API": - - skip: - awaits_fix: "https://github.com/elastic/elasticsearch/issues/108509" - - requires: capabilities: - method: GET diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java index c2acbf65f6e5..3527b8cc4684 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/NodesCapabilitiesResponse.java @@ -37,8 +37,13 @@ protected void writeNodesTo(StreamOutput out, List nodes) throws } public Optional isSupported() { - // if there are any failures, we don't know if it is fully supported by all nodes in the cluster - if (hasFailures() || getNodes().isEmpty()) return Optional.empty(); + if (hasFailures() || getNodes().isEmpty()) { + // there's no nodes in the response (uh? what about ourselves?) + // or there's a problem (hopefully transient) talking to one or more nodes. 
+ // We don't have enough information to decide if it's supported or not, so return unknown + return Optional.empty(); + } + return Optional.of(getNodes().stream().allMatch(NodeCapability::isSupported)); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java index 7e392775bf42..71aa95908d3b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/capabilities/TransportNodesCapabilitiesAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.cluster.node.capabilities; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; @@ -18,8 +19,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequest; @@ -27,6 +30,7 @@ import java.io.IOException; import java.util.List; +import java.util.Optional; import java.util.Set; public class TransportNodesCapabilitiesAction extends TransportNodesAction< @@ -38,6 +42,7 @@ public class TransportNodesCapabilitiesAction extends TransportNodesAction< public static final ActionType TYPE = new ActionType<>("cluster:monitor/nodes/capabilities"); private final RestController restController; + private final 
FeatureService featureService; @Inject public TransportNodesCapabilitiesAction( @@ -45,7 +50,8 @@ public TransportNodesCapabilitiesAction( ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, - RestController restController + RestController restController, + FeatureService featureService ) { super( TYPE.name(), @@ -56,6 +62,23 @@ public TransportNodesCapabilitiesAction( threadPool.executor(ThreadPool.Names.MANAGEMENT) ); this.restController = restController; + this.featureService = featureService; + } + + @Override + protected void doExecute(Task task, NodesCapabilitiesRequest request, ActionListener listener) { + if (featureService.clusterHasFeature(clusterService.state(), RestNodesCapabilitiesAction.CAPABILITIES_ACTION) == false) { + // not everything in the cluster supports capabilities. + // Therefore we don't support whatever it is we're being asked for + listener.onResponse(new NodesCapabilitiesResponse(clusterService.getClusterName(), List.of(), List.of()) { + @Override + public Optional isSupported() { + return Optional.of(false); + } + }); + } else { + super.doExecute(task, request, listener); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java index 73b788d63b2a..111204fbe7fb 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestFeatures.java +++ b/server/src/main/java/org/elasticsearch/rest/RestFeatures.java @@ -12,10 +12,17 @@ import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.action.admin.cluster.RestClusterGetSettingsAction; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; import java.util.Map; +import java.util.Set; public class RestFeatures implements FeatureSpecification { + @Override + public Set getFeatures() { + return Set.of(RestNodesCapabilitiesAction.CAPABILITIES_ACTION); + } + 
@Override public Map getHistoricalFeatures() { return Map.of(RestClusterGetSettingsAction.SUPPORTS_GET_SETTINGS_ACTION, Version.V_8_3_0); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java index 9b89a6a932dd..fae7903d02b8 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesCapabilitiesAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.admin.cluster.node.capabilities.NodesCapabilitiesRequest; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -26,6 +27,8 @@ @ServerlessScope(Scope.INTERNAL) public class RestNodesCapabilitiesAction extends BaseRestHandler { + public static final NodeFeature CAPABILITIES_ACTION = new NodeFeature("rest.capabilities_action"); + @Override public List routes() { return List.of(new Route(RestRequest.Method.GET, "/_capabilities")); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index fd3ba7d864f9..6dfd51c0bee5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -105,6 +105,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; +import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; @@ -261,6 +262,43 @@ protected static Set readVersionsFromNodesInfo(RestClient adminClient) t 
.collect(Collectors.toUnmodifiableMap(entry -> entry.getKey().toString(), entry -> (Map) entry.getValue())); } + /** + * Does the cluster being tested support the set of capabilities + * for specified path and method. + */ + protected static Optional clusterHasCapability( + String method, + String path, + Collection parameters, + Collection capabilities + ) throws IOException { + return clusterHasCapability(adminClient, method, path, parameters, capabilities); + } + + /** + * Does the cluster on the other side of {@code client} support the set + * of capabilities for specified path and method. + */ + protected static Optional clusterHasCapability( + RestClient client, + String method, + String path, + Collection parameters, + Collection capabilities + ) throws IOException { + Request request = new Request("GET", "_capabilities"); + request.addParameter("method", method); + request.addParameter("path", path); + if (parameters.isEmpty() == false) { + request.addParameter("parameters", String.join(",", parameters)); + } + if (capabilities.isEmpty() == false) { + request.addParameter("capabilities", String.join(",", capabilities)); + } + Map response = entityAsMap(client.performRequest(request).getEntity()); + return Optional.ofNullable((Boolean) response.get("supported")); + } + protected static boolean clusterHasFeature(String featureId) { return testFeatureService.clusterHasFeature(featureId); } From 5a824c5a38e51590064e410a292fa50cb0bb0d86 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Fri, 10 May 2024 17:39:04 +0200 Subject: [PATCH 053/119] [Inference API] Improve completion response entity tests (#108512) --- .../AzureOpenAiCompletionResponseEntityTests.java | 9 +++------ .../openai/OpenAiChatCompletionResponseEntityTests.java | 6 +++--- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java index 3afe4bd439e0..ec76f43a6d52 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/azureopenai/AzureOpenAiCompletionResponseEntityTests.java @@ -17,7 +17,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; @@ -50,7 +49,7 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { "index": 0, "logprobs": null, "message": { - "content": "response", + "content": "result", "role": "assistant" } } @@ -92,10 +91,8 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(chatCompletionResults.getResults().size(), equalTo(1)); - - ChatCompletionResults.Result result = chatCompletionResults.getResults().get(0); - assertThat(result.asMap().get(result.getResultsField()), is("response")); + assertThat(chatCompletionResults.getResults().size(), is(1)); + assertThat(chatCompletionResults.getResults().get(0).content(), is("result")); } public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java index 080602e8fd24..5604d6573144 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiChatCompletionResponseEntityTests.java @@ -17,7 +17,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; @@ -35,7 +34,7 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { "index": 0, "message": { "role": "assistant", - "content": "some content" + "content": "result" }, "logprobs": null, "finish_reason": "stop" @@ -55,7 +54,8 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(chatCompletionResults.getResults().size(), equalTo(1)); + assertThat(chatCompletionResults.getResults().size(), is(1)); + assertThat(chatCompletionResults.getResults().get(0).content(), is("result")); } public void testFromResponse_FailsWhenChoicesFieldIsNotPresent() { From ef12b99284785b5877bf62193bdb6f40a0bde66f Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 10 May 2024 12:04:58 -0400 Subject: [PATCH 054/119] Fix random sampler consistency test (#107957) Random sampler consistency requires a restricted number of segments, to ensure we always hit the same number of segments and that no merging is occurring, this merges the segment count to 1 for this particular test. In practice, this isn't needed as the approximate nature of the aggregation already means you could get different statistics per call, but they are within an error bound set by the users configured sampling probability. 
closes: https://github.com/elastic/elasticsearch/issues/105839 --- .../search/aggregations/bucket/RandomSamplerIT.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java index c9a6cfaf754c..71402d3e9c1d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java @@ -22,6 +22,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -83,9 +84,11 @@ public void setupSuiteScopeCluster() throws Exception { } indexRandom(true, builders); ensureSearchable(); + // Force merge to ensure segment consistency as any segment merging can change which particular documents + // are sampled + assertNoFailures(indicesAdmin().prepareForceMerge("idx").setMaxNumSegments(1).get()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105839") public void testRandomSamplerConsistentSeed() { double[] sampleMonotonicValue = new double[1]; double[] sampleNumericValue = new double[1]; From 2d14095ebf66bf04881f04b7e7f92ba2834187f3 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 10 May 2024 09:51:56 -0700 Subject: [PATCH 055/119] Handle blocked cluster state in file settings (#108481) When file settings is first loaded on a master node starting up, the cluster state will likely be in a blocked 
state before it recovers. In that case the file settings will not be processable since the metadata will be missing in cluster state. This commit makes watching for file settings not start until the cluster state is in a recovered state. It also updates the the reserved state update task to handle a similar case where a task may be queued and then run at time when the node is no longer master, but before the watcher is stopped. --- .../service/FileSettingsServiceIT.java | 2 +- .../file/MasterNodeFileWatchingService.java | 4 +- .../service/ReservedStateUpdateTask.java | 8 ++ .../MasterNodeFileWatchingServiceTests.java | 119 ++++++++++++++++++ .../service/ReservedStateUpdateTaskTests.java | 31 +++++ 5 files changed, 162 insertions(+), 2 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/common/file/MasterNodeFileWatchingServiceTests.java create mode 100644 server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java index 58dcfdaec514..38bc372868df 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/reservedstate/service/FileSettingsServiceIT.java @@ -243,7 +243,7 @@ public void testReservedStatePersistsOnRestart() throws Exception { FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); - assertTrue(masterFileSettingsService.watching()); + assertBusy(() -> assertTrue(masterFileSettingsService.watching())); logger.info("--> write some settings"); writeJSONFile(masterNode, testJSON); diff --git a/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java 
b/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java index 6da0845a7c7b..65bfa804cec2 100644 --- a/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java +++ b/server/src/main/java/org/elasticsearch/common/file/MasterNodeFileWatchingService.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.gateway.GatewayService; import java.io.IOException; import java.nio.file.Files; @@ -58,7 +59,8 @@ protected void doStop() { @Override public final void clusterChanged(ClusterChangedEvent event) { ClusterState clusterState = event.state(); - if (clusterState.nodes().isLocalNodeElectedMaster()) { + if (clusterState.nodes().isLocalNodeElectedMaster() + && clusterState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK) == false) { synchronized (this) { if (watching() || active == false) { refreshExistingFileStateIfNeeded(clusterState); diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java index 535758ed71ea..1ac42a91736c 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTask.java @@ -19,6 +19,7 @@ import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; import org.elasticsearch.cluster.metadata.ReservedStateHandlerMetadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; +import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.reservedstate.NonStateTransformResult; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.reservedstate.TransformState; @@ -80,6 +81,13 @@ ActionListener 
listener() { } protected ClusterState execute(final ClusterState currentState) { + if (currentState.blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { + // If cluster state has become blocked, this task was submitted while the node was master but is now not master. + // The new master will re-read file settings, so whatever update was to be written here will be handled + // by the new master. + return currentState; + } + ReservedStateMetadata existingMetadata = currentState.metadata().reservedStateMetadata().get(namespace); Map reservedState = stateChunk.state(); ReservedStateVersion reservedStateVersion = stateChunk.metadata(); diff --git a/server/src/test/java/org/elasticsearch/common/file/MasterNodeFileWatchingServiceTests.java b/server/src/test/java/org/elasticsearch/common/file/MasterNodeFileWatchingServiceTests.java new file mode 100644 index 000000000000..f92097f53bb8 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/file/MasterNodeFileWatchingServiceTests.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.file; + +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.node.NodeRoleSettings; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.concurrent.ExecutionException; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class MasterNodeFileWatchingServiceTests extends ESTestCase { + + static final DiscoveryNode localNode = DiscoveryNodeUtils.create("local-node"); + MasterNodeFileWatchingService testService; + Path watchedFile; + Runnable fileChangedCallback; + + @Before + public void setupTestService() throws IOException { + watchedFile = createTempFile(); + ClusterService clusterService = mock(ClusterService.class); + Settings settings = Settings.builder() + .put(NodeRoleSettings.NODE_ROLES_SETTING.getKey(), DiscoveryNodeRole.MASTER_ROLE.roleName()) + .build(); + when(clusterService.getSettings()).thenReturn(settings); + fileChangedCallback = () -> {}; + testService = new MasterNodeFileWatchingService(clusterService, watchedFile) { + + @Override + protected void processFileChanges() throws InterruptedException, ExecutionException, IOException { + fileChangedCallback.run(); + } + + @Override + protected void processInitialFileMissing() throws InterruptedException, 
ExecutionException, IOException { + // file always exists, but we don't care about the missing case for master node behavior + } + }; + testService.start(); + } + + @After + public void stopTestService() { + testService.stop(); + } + + public void testBecomingMasterNodeStartsWatcher() { + ClusterState notRecoveredClusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", notRecoveredClusterState, ClusterState.EMPTY_STATE)); + // just a master node isn't sufficient, cluster state also must be recovered + assertThat(testService.watching(), is(false)); + + ClusterState recoveredClusterState = ClusterState.builder(notRecoveredClusterState) + .blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", recoveredClusterState, notRecoveredClusterState)); + // just a master node isn't sufficient, cluster state also must be recovered + assertThat(testService.watching(), is(true)); + } + + public void testChangingMasterStopsWatcher() { + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", clusterState, ClusterState.EMPTY_STATE)); + assertThat(testService.watching(), is(true)); + + final DiscoveryNode anotherNode = DiscoveryNodeUtils.create("another-node"); + ClusterState differentMasterClusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes( + DiscoveryNodes.builder().add(localNode).add(anotherNode).localNodeId(localNode.getId()).masterNodeId(anotherNode.getId()) + ) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", 
differentMasterClusterState, clusterState)); + assertThat(testService.watching(), is(false)); + } + + public void testBlockingClusterStateStopsWatcher() { + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", clusterState, ClusterState.EMPTY_STATE)); + assertThat(testService.watching(), is(true)); + + ClusterState blockedClusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(localNode).localNodeId(localNode.getId()).masterNodeId(localNode.getId())) + .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) + .build(); + testService.clusterChanged(new ClusterChangedEvent("test", blockedClusterState, clusterState)); + assertThat(testService.watching(), is(false)); + } +} diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java new file mode 100644 index 000000000000..d887d7edb19f --- /dev/null +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/ReservedStateUpdateTaskTests.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.reservedstate.service; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.test.ESTestCase; + +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.sameInstance; + +public class ReservedStateUpdateTaskTests extends ESTestCase { + public void testBlockedClusterState() { + var task = new ReservedStateUpdateTask("dummy", null, List.of(), Map.of(), List.of(), e -> {}, ActionListener.noop()); + ClusterState notRecoveredClusterState = ClusterState.builder(ClusterName.DEFAULT) + .blocks(ClusterBlocks.builder().addGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) + .build(); + assertThat(task.execute(notRecoveredClusterState), sameInstance(notRecoveredClusterState)); + } +} From 04d3b9989fe03d3591d9eb637dc54f7813e174ea Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 10 May 2024 13:45:42 -0400 Subject: [PATCH 056/119] ESQL: Begin optimizing `Block#lookup` (#108482) This creates the infrastructure to allow optimizing the `lookup` method when applied to `Vector`s and then implements that optimization for constant vectors. Constant vectors now take one of six paths: 1. An empty positions `Block` yields an empty result set. 2. If `positions` is a `Block`, perform the un-optimized lookup. 3. If the `min` of the `positions` *Vector* is less that 0 then throw an exception. 4. If the `min` of the positions Vector is greater than the number of positions in the lookup block then return a single `ConstantNullBlock` because you are looking up outside the range. 5. If the `max` of the positions Vector is less than the number of positions in the lookup block then return a `Constant$Type$Block` with the same value as the lookup block. This is a lookup that's entirely within range. 6. 
Otherwise return the unoptimized lookup. This is *fairly* simple but demonstrates how we can plug in more complex optimizations later. --- .../core/ReleasableIterator.java | 26 ++++++ .../compute/data/BooleanArrayVector.java | 7 ++ .../compute/data/BooleanBigArrayVector.java | 7 ++ .../compute/data/BooleanVector.java | 5 ++ .../compute/data/BooleanVectorBlock.java | 5 +- .../compute/data/BytesRefArrayVector.java | 7 ++ .../compute/data/BytesRefVector.java | 5 ++ .../compute/data/BytesRefVectorBlock.java | 5 +- .../compute/data/ConstantBooleanVector.java | 24 +++++ .../compute/data/ConstantBytesRefVector.java | 24 +++++ .../compute/data/ConstantDoubleVector.java | 24 +++++ .../compute/data/ConstantIntVector.java | 24 +++++ .../compute/data/ConstantLongVector.java | 24 +++++ .../compute/data/DoubleArrayVector.java | 7 ++ .../compute/data/DoubleBigArrayVector.java | 7 ++ .../compute/data/DoubleVector.java | 5 ++ .../compute/data/DoubleVectorBlock.java | 5 +- .../compute/data/IntArrayVector.java | 7 ++ .../compute/data/IntBigArrayVector.java | 7 ++ .../elasticsearch/compute/data/IntVector.java | 5 ++ .../compute/data/IntVectorBlock.java | 5 +- .../compute/data/LongArrayVector.java | 7 ++ .../compute/data/LongBigArrayVector.java | 7 ++ .../compute/data/LongVector.java | 5 ++ .../compute/data/LongVectorBlock.java | 5 +- .../org/elasticsearch/compute/data/Block.java | 8 +- .../compute/data/ConstantNullVector.java | 8 ++ .../elasticsearch/compute/data/DocBlock.java | 2 +- .../elasticsearch/compute/data/DocVector.java | 7 ++ .../compute/data/OrdinalBytesRefVector.java | 7 ++ .../elasticsearch/compute/data/Vector.java | 29 ++++++ .../compute/data/X-ArrayVector.java.st | 9 ++ .../compute/data/X-BigArrayVector.java.st | 7 ++ .../compute/data/X-ConstantVector.java.st | 24 +++++ .../compute/data/X-Vector.java.st | 5 ++ .../compute/data/X-VectorBlock.java.st | 5 +- .../compute/data/BasicBlockTests.java | 89 +++++++++++++++++-- 37 files changed, 431 insertions(+), 28 deletions(-) 
diff --git a/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java b/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java index 68a4a136c530..83a68c984a68 100644 --- a/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java +++ b/libs/core/src/main/java/org/elasticsearch/core/ReleasableIterator.java @@ -46,4 +46,30 @@ public String toString() { }; } + + /** + * Returns an empty iterator over the supplied value. + */ + static ReleasableIterator empty() { + return new ReleasableIterator<>() { + @Override + public boolean hasNext() { + return false; + } + + @Override + public T next() { + assert false : "hasNext is always false so next should never be called"; + return null; + } + + @Override + public void close() {} + + @Override + public String toString() { + return "ReleasableIterator[]"; + } + }; + } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java index e195bda3a6db..a91999a49c16 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -91,6 +93,11 @@ public BooleanVector filter(int... 
positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(boolean[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java index 5f6db129e73d..9215cd0d9bbd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -87,6 +89,11 @@ public BooleanVector filter(int... 
positions) { return new BooleanBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link BitArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java index 7218f3d2771c..c8921a7c9f02 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface BooleanVector extends Vector permits ConstantBooleanVect @Override BooleanVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a BooleanVector, and both vectors are {@link #equals(BooleanVector, BooleanVector) equal}. 
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java index 013718bb42a7..193e6ea5d896 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java @@ -52,9 +52,8 @@ public BooleanBlock filter(int... positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new BooleanLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index 75cf4a2e1fe5..61bbfb5ebbd0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -11,7 +11,9 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -91,6 +93,11 @@ public BytesRefVector filter(int... 
positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(BytesRefArray values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java index 4f07ca2d6104..3739dccb0f95 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java @@ -11,6 +11,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -34,6 +36,9 @@ public sealed interface BytesRefVector extends Vector permits ConstantBytesRefVe @Override BytesRefVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a BytesRefVector, and both vectors are {@link #equals(BytesRefVector, BytesRefVector) equal}. 
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java index 39bd37ea9bc3..16a8fc088809 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java @@ -63,9 +63,8 @@ public BytesRefBlock filter(int... positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new BytesRefLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java index 16d70d1a0e80..1f6786f64e0a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant boolean value. @@ -39,6 +41,28 @@ public BooleanVector filter(int... 
positions) { return blockFactory().newConstantBooleanVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((BooleanBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantBooleanBlockWith(value, positions.getPositionCount())); + } + return new BooleanLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.BOOLEAN; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java index eed780a42f7b..33967d66374c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant BytesRef value. @@ -45,6 +47,28 @@ public BytesRefVector filter(int... 
positions) { return blockFactory().newConstantBytesRefVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((BytesRefBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantBytesRefBlockWith(value, positions.getPositionCount())); + } + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.BYTES_REF; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java index a783f0243313..1ddf31d753d4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant double value. @@ -39,6 +41,28 @@ public DoubleVector filter(int... 
positions) { return blockFactory().newConstantDoubleVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((DoubleBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantDoubleBlockWith(value, positions.getPositionCount())); + } + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.DOUBLE; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java index c6d463af7cfa..e8fb8cb39ceb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant int value. @@ -39,6 +41,28 @@ public IntVector filter(int... 
positions) { return blockFactory().newConstantIntVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new IntLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((IntBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantIntBlockWith(value, positions.getPositionCount())); + } + return new IntLookup(asBlock(), positions, targetBlockSize); + } + /** * The minimum value in the block. */ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java index 0173f1c1d4d7..b997cbbe2284 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java @@ -8,6 +8,8 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant long value. @@ -39,6 +41,28 @@ public LongVector filter(int... 
positions) { return blockFactory().newConstantLongVector(value, positions.length); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new LongLookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single((LongBlock) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstantLongBlockWith(value, positions.getPositionCount())); + } + return new LongLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return ElementType.LONG; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index 476d5e55c55a..e7c1d342133d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -90,6 +92,11 @@ public DoubleVector filter(int... 
positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(double[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java index 8f6aedf31b50..d558eabd2dd4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -86,6 +88,11 @@ public DoubleVector filter(int... 
positions) { return new DoubleBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new DoubleLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link DoubleArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java index 1d71575b3331..3d93043f93d8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface DoubleVector extends Vector permits ConstantDoubleVector @Override DoubleVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a DoubleVector, and both vectors are {@link #equals(DoubleVector, DoubleVector) equal}. 
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java index e76a4e0c5fde..24887bebcd83 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java @@ -52,9 +52,8 @@ public DoubleBlock filter(int... positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new DoubleLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index 97bf1675a9a3..e9d9a6b3fb95 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -100,6 +102,11 @@ public IntVector filter(int... 
positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new IntLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(int[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java index fe89782bad0e..df8298b87237 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.IntArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -126,6 +128,11 @@ public IntVector filter(int... 
positions) { return new IntBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new IntLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link IntArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java index 8f6f42b66fbe..b1a2d1b80a41 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface IntVector extends Vector permits ConstantIntVector, IntA @Override IntVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * The minimum value in the Vector. An empty Vector will return {@link Integer#MAX_VALUE}. */ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java index 70bcf6919bea..ae28fb9f6ffa 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java @@ -52,9 +52,8 @@ public IntBlock filter(int... 
positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new IntLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index 4b504943b760..5fa904dcf1ac 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -90,6 +92,11 @@ public LongVector filter(int... 
positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new LongLookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated(long[] values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java index d30dedd4cce1..a7828788169c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java @@ -10,8 +10,10 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -86,6 +88,11 @@ public LongVector filter(int... 
positions) { return new LongBigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new LongLookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link LongArray} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java index 2ebdb89a3126..e2f53d1ee07f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java @@ -10,6 +10,8 @@ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -27,6 +29,9 @@ public sealed interface LongVector extends Vector permits ConstantLongVector, Lo @Override LongVector filter(int... positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * Compares the given object with this vector for equality. Returns {@code true} if and only if the * given object is a LongVector, and both vectors are {@link #equals(LongVector, LongVector) equal}. 
diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java index b6f1e8e77505..01921e1195f4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java @@ -52,9 +52,8 @@ public LongBlock filter(int... positions) { } @Override - public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new LongLookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index cfa1d3656ba3..9a6b701a2e4e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -139,19 +139,19 @@ public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, R * same number of {@link #getPositionCount() positions} as the {@code positions} * parameter. *

- * For example, this this block contained {@code [a, b, [b, c]]} + * For example, if this block contained {@code [a, b, [b, c]]} * and were called with the block {@code [0, 1, 1, [1, 2]]} then the * result would be {@code [a, b, b, [b, b, c]]}. *

*

* This process produces {@code count(this) * count(positions)} values per - * positions which could be quite quite large. Instead of returning a single + * positions which could be quite large. Instead of returning a single * Block, this returns an Iterator of Blocks containing all of the promised * values. *

*

- * The returned {@link ReleasableIterator} may retain a reference to {@link Block}s - * inside the {@link Page}. Close it to release those references. + * The returned {@link ReleasableIterator} may retain a reference to the + * {@code positions} parameter. Close it to release those references. *

*

* This block is built using the same {@link BlockFactory} as was used to diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java index 4deededdf41c..a8a6dbaf382f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -45,6 +47,12 @@ public ConstantNullVector filter(int... positions) { throw new UnsupportedOperationException("null vector"); } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + assert false : "null vector"; + throw new UnsupportedOperationException("null vector"); + } + @Override public boolean getBoolean(int position) { assert false : "null vector"; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index e5a0d934aa01..da9ca2bbae27 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -52,7 +52,7 @@ public Block filter(int... 
positions) { @Override public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("can't lookup values from DocBlock"); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index 067fddd311cc..33f5797f60df 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.IntroSorter; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.util.Objects; @@ -235,6 +237,11 @@ public DocVector filter(int... positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + throw new UnsupportedOperationException("can't lookup values from DocVector"); + } + @Override public ElementType elementType() { return ElementType.DOC; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java index a67db54b68ec..ec0c7efa715a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/OrdinalBytesRefVector.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import 
org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -120,6 +122,11 @@ public BytesRefVector filter(int... positions) { } } + @Override + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new BytesRefLookup(asBlock(), positions, targetBlockSize); + } + @Override public ElementType elementType() { return bytes.elementType(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index 89b39569be45..9a5688685374 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -8,8 +8,10 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.Accountable; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; /** * A dense Vector of single values. @@ -35,6 +37,33 @@ public interface Vector extends Accountable, RefCounted, Releasable { */ Vector filter(int... positions); + /** + * Builds an Iterator of new {@link Block}s with the same {@link #elementType} + * as this {@link Vector} whose values are copied from positions in this Vector. + * It has the same number of {@link #getPositionCount() positions} as the + * {@code positions} parameter. + *

+ * For example, if this vector contained {@code [a, b, c]} + * and were called with the block {@code [0, 1, 1, [1, 2]]} then the + * result would be {@code [a, b, b, [b, c]]}. + *

+ *

+ * This process produces {@code count(positions)} values per + * positions which could be quite large. Instead of returning a single + * Block, this returns an Iterator of Blocks containing all of the promised + * values. + *

+ *

+ * The returned {@link ReleasableIterator} may retain a reference to the + * {@code positions} parameter. Close it to release those references. + *

+ *

+ * This block is built using the same {@link BlockFactory} as was used to + * build the {@code positions} parameter. + *

+ */ + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + /** * {@return the element type of this vector} */ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index 4afd8db62f84..d594d32898d3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -12,7 +12,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import java.io.IOException; @@ -21,6 +23,8 @@ $else$ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; import java.util.stream.Collectors; @@ -168,6 +172,11 @@ $endif$ } } + @Override + public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + public static long ramBytesEstimated($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values) { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st index d6a8723748c1..30ef9e799cf1 
100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st @@ -10,8 +10,10 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.$Array$; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -148,6 +150,11 @@ $endif$ return new $Type$BigArrayVector(filtered, positions.length, blockFactory); } + @Override + public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + @Override public void closeInternal() { // The circuit breaker that tracks the values {@link $if(boolean)$Bit$else$$Type$$endif$Array} is adjusted outside diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st index 37cb2d241252..42c34128121a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st @@ -11,6 +11,8 @@ $if(BytesRef)$ import org.apache.lucene.util.BytesRef; $endif$ import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; /** * Vector implementation that stores a constant $type$ value. 
@@ -58,6 +60,28 @@ $endif$ return blockFactory().newConstant$Type$Vector(value, positions.length); } + @Override + public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + if (positions.getPositionCount() == 0) { + return ReleasableIterator.empty(); + } + IntVector positionsVector = positions.asVector(); + if (positionsVector == null) { + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + int min = positionsVector.min(); + if (min < 0) { + throw new IllegalArgumentException("invalid position [" + min + "]"); + } + if (min > getPositionCount()) { + return ReleasableIterator.single(($Type$Block) positions.blockFactory().newConstantNullBlock(positions.getPositionCount())); + } + if (positionsVector.max() < getPositionCount()) { + return ReleasableIterator.single(positions.blockFactory().newConstant$Type$BlockWith(value, positions.getPositionCount())); + } + return new $Type$Lookup(asBlock(), positions, targetBlockSize); + } + $if(int)$ /** * The minimum value in the block. diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index 746ccc97a281..628ee93ed757 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -13,6 +13,8 @@ $endif$ import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.ReleasableIterator; import java.io.IOException; @@ -54,6 +56,9 @@ $endif$ @Override $Type$Vector filter(int... 
positions); + @Override + ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize); + $if(int)$ /** * The minimum value in the Vector. An empty Vector will return {@link Integer#MAX_VALUE}. diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st index f011d6f2a4b4..8f4390e8782c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st @@ -72,9 +72,8 @@ $endif$ } @Override - public ReleasableIterator<$Type$Block> lookup(IntBlock positions, ByteSizeValue targetBlockSize) { - // TODO optimizations - return new $Type$Lookup(this, positions, targetBlockSize); + public ReleasableIterator lookup(IntBlock positions, ByteSizeValue targetBlockSize) { + return vector.lookup(positions, targetBlockSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index 3d80e560cc4d..017d4c7065be 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -35,6 +35,7 @@ import java.util.BitSet; import java.util.List; import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.Supplier; import java.util.stream.IntStream; import java.util.stream.LongStream; @@ -283,8 +284,19 @@ public void testConstantIntBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> 
assertThat(b.asVector(), instanceOf(ConstantIntVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); assertThat(block.asVector().min(), equalTo(value)); assertThat(block.asVector().max(), equalTo(value)); @@ -365,8 +377,19 @@ public void testConstantLongBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantLongVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -447,8 +470,19 @@ public void testConstantDoubleBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantDoubleVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -605,8 +639,19 @@ public void testConstantBytesRefBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, 
value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantBytesRefVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -689,8 +734,19 @@ public void testConstantBooleanBlock() { positions(blockFactory, 1, 2, new int[] { 1, 2 }), List.of(List.of(value), List.of(value), List.of(value, value)) ); + assertLookup( + block, + positions(blockFactory, 1, 2), + List.of(List.of(value), List.of(value)), + b -> assertThat(b.asVector(), instanceOf(ConstantBooleanVector.class)) + ); } - assertLookup(block, positions(blockFactory, positionCount + 1000), singletonList(null)); + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); assertEmptyLookup(blockFactory, block); releaseAndAssertBreaker(block); } @@ -716,6 +772,24 @@ public void testConstantNullBlock() { assertThat(positionCount, is(block.getPositionCount())); assertThat(block.getPositionCount(), is(positionCount)); assertThat(block.isNull(randomPosition(positionCount)), is(true)); + if (positionCount > 2) { + List> expected = new ArrayList<>(); + expected.add(null); + expected.add(null); + expected.add(null); + assertLookup( + block, + positions(blockFactory, 1, 2, new int[] { 1, 2 }), + expected, + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); + } + assertLookup( + block, + positions(blockFactory, positionCount + 1000), + singletonList(null), + b -> assertThat(b, instanceOf(ConstantNullBlock.class)) + ); releaseAndAssertBreaker(block); } } @@ -1544,11 +1618,16 @@ static void assertEmptyLookup(BlockFactory 
blockFactory, Block block) { } static void assertLookup(Block block, IntBlock positions, List> expected) { + assertLookup(block, positions, expected, l -> {}); + } + + static void assertLookup(Block block, IntBlock positions, List> expected, Consumer extra) { try (positions; ReleasableIterator lookup = block.lookup(positions, ByteSizeValue.ofKb(100))) { assertThat(lookup.hasNext(), equalTo(true)); try (Block b = lookup.next()) { assertThat(valuesAtPositions(b, 0, b.getPositionCount()), equalTo(expected)); assertThat(b.blockFactory(), sameInstance(positions.blockFactory())); + extra.accept(b); } assertThat(lookup.hasNext(), equalTo(false)); } From 11de886346b02df8f23848bb31d29ad4b70f02f3 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Fri, 10 May 2024 14:49:25 -0400 Subject: [PATCH 057/119] [ES|QL] Add/Modify annotations for spatial and conditional functions for better doc generation (#107722) * annotation for spatial functions and conditional functions --- .../esql/functions/coalesce.asciidoc | 13 -- ...itional-functions-and-expressions.asciidoc | 8 +- .../esql/functions/description/case.asciidoc | 2 +- .../functions/description/greatest.asciidoc | 4 +- .../esql/functions/description/least.asciidoc | 2 +- .../description/st_contains.asciidoc | 2 +- .../description/st_disjoint.asciidoc | 2 +- .../description/st_intersects.asciidoc | 2 +- .../functions/description/st_within.asciidoc | 2 +- .../esql/functions/description/st_x.asciidoc | 2 +- .../esql/functions/description/st_y.asciidoc | 2 +- .../esql/functions/examples/case.asciidoc | 32 +++ .../esql/functions/examples/greatest.asciidoc | 13 ++ .../esql/functions/examples/least.asciidoc | 13 ++ .../esql/functions/examples/st_x.asciidoc | 13 ++ .../esql/functions/examples/st_y.asciidoc | 13 ++ .../esql/functions/greatest.asciidoc | 38 ---- .../functions/kibana/definition/case.json | 211 +++++++++++++++++- .../functions/kibana/definition/coalesce.json | 30 +-- 
.../functions/kibana/definition/greatest.json | 48 ++-- .../functions/kibana/definition/least.json | 47 ++-- .../kibana/definition/st_contains.json | 34 +-- .../kibana/definition/st_disjoint.json | 34 +-- .../kibana/definition/st_intersects.json | 34 +-- .../kibana/definition/st_within.json | 34 +-- .../functions/kibana/definition/st_x.json | 9 +- .../functions/kibana/definition/st_y.json | 9 +- .../esql/functions/kibana/docs/case.md | 16 +- .../esql/functions/kibana/docs/greatest.md | 8 +- .../esql/functions/kibana/docs/least.md | 6 +- .../esql/functions/kibana/docs/st_contains.md | 1 + .../esql/functions/kibana/docs/st_disjoint.md | 2 + .../functions/kibana/docs/st_intersects.md | 6 +- .../esql/functions/kibana/docs/st_within.md | 1 + .../esql/functions/kibana/docs/st_x.md | 7 +- .../esql/functions/kibana/docs/st_y.md | 7 +- .../esql/functions/layout/case.asciidoc | 1 + .../esql/functions/layout/greatest.asciidoc | 1 + .../esql/functions/layout/least.asciidoc | 1 + .../esql/functions/layout/st_x.asciidoc | 1 + .../esql/functions/layout/st_y.asciidoc | 1 + docs/reference/esql/functions/least.asciidoc | 38 ---- .../esql/functions/parameters/case.asciidoc | 4 +- .../functions/parameters/coalesce.asciidoc | 4 +- .../functions/parameters/greatest.asciidoc | 4 +- .../esql/functions/parameters/least.asciidoc | 4 +- .../functions/parameters/st_contains.asciidoc | 4 +- .../functions/parameters/st_disjoint.asciidoc | 4 +- .../parameters/st_intersects.asciidoc | 4 +- .../functions/parameters/st_within.asciidoc | 4 +- .../esql/functions/parameters/st_x.asciidoc | 2 +- .../esql/functions/parameters/st_y.asciidoc | 2 +- .../esql/functions/spatial-functions.asciidoc | 12 +- .../esql/functions/st_contains.asciidoc | 26 --- .../esql/functions/st_disjoint.asciidoc | 27 --- .../esql/functions/st_intersects.asciidoc | 31 --- .../esql/functions/st_within.asciidoc | 26 --- docs/reference/esql/functions/st_x.asciidoc | 33 --- docs/reference/esql/functions/st_y.asciidoc | 33 --- 
.../esql/functions/types/case.asciidoc | 12 +- .../src/main/resources/meta.csv-spec | 46 ++-- .../function/scalar/conditional/Case.java | 27 ++- .../function/scalar/conditional/Greatest.java | 23 +- .../function/scalar/conditional/Least.java | 21 +- .../function/scalar/nulls/Coalesce.java | 4 +- .../scalar/spatial/SpatialContains.java | 12 +- .../scalar/spatial/SpatialDisjoint.java | 13 +- .../scalar/spatial/SpatialIntersects.java | 19 +- .../scalar/spatial/SpatialWithin.java | 12 +- .../function/scalar/spatial/StX.java | 17 +- .../function/scalar/spatial/StY.java | 17 +- .../scalar/conditional/CaseTests.java | 185 +++++++++++++-- 72 files changed, 860 insertions(+), 522 deletions(-) delete mode 100644 docs/reference/esql/functions/coalesce.asciidoc create mode 100644 docs/reference/esql/functions/examples/case.asciidoc create mode 100644 docs/reference/esql/functions/examples/greatest.asciidoc create mode 100644 docs/reference/esql/functions/examples/least.asciidoc create mode 100644 docs/reference/esql/functions/examples/st_x.asciidoc create mode 100644 docs/reference/esql/functions/examples/st_y.asciidoc delete mode 100644 docs/reference/esql/functions/greatest.asciidoc delete mode 100644 docs/reference/esql/functions/least.asciidoc delete mode 100644 docs/reference/esql/functions/st_contains.asciidoc delete mode 100644 docs/reference/esql/functions/st_disjoint.asciidoc delete mode 100644 docs/reference/esql/functions/st_intersects.asciidoc delete mode 100644 docs/reference/esql/functions/st_within.asciidoc delete mode 100644 docs/reference/esql/functions/st_x.asciidoc delete mode 100644 docs/reference/esql/functions/st_y.asciidoc diff --git a/docs/reference/esql/functions/coalesce.asciidoc b/docs/reference/esql/functions/coalesce.asciidoc deleted file mode 100644 index 2d8c0f379c82..000000000000 --- a/docs/reference/esql/functions/coalesce.asciidoc +++ /dev/null @@ -1,13 +0,0 @@ -[discrete] -[[esql-coalesce]] -=== `COALESCE` - -*Syntax* - -[source,esql] ----- 
-COALESCE(expression1 [, ..., expressionN]) ----- -include::parameters/coalesce.asciidoc[] -include::description/coalesce.asciidoc[] -include::examples/coalesce.asciidoc[] diff --git a/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc b/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc index d835a14856c0..081e3b8589db 100644 --- a/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc +++ b/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc @@ -15,7 +15,7 @@ manner. {esql} supports these conditional functions: * <> // end::cond_list[] -include::case.asciidoc[] -include::coalesce.asciidoc[] -include::greatest.asciidoc[] -include::least.asciidoc[] +include::layout/case.asciidoc[] +include::layout/coalesce.asciidoc[] +include::layout/greatest.asciidoc[] +include::layout/least.asciidoc[] diff --git a/docs/reference/esql/functions/description/case.asciidoc b/docs/reference/esql/functions/description/case.asciidoc index 5c98a7a2620d..c3e80301fbc3 100644 --- a/docs/reference/esql/functions/description/case.asciidoc +++ b/docs/reference/esql/functions/description/case.asciidoc @@ -2,4 +2,4 @@ *Description* -Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. +Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to `true`. If the number of arguments is odd, the last argument is the default value which is returned when no condition matches. If the number of arguments is even, and no condition matches, the function returns `null`. 
diff --git a/docs/reference/esql/functions/description/greatest.asciidoc b/docs/reference/esql/functions/description/greatest.asciidoc index 3c7cfd3bfb14..ed705d0bbb59 100644 --- a/docs/reference/esql/functions/description/greatest.asciidoc +++ b/docs/reference/esql/functions/description/greatest.asciidoc @@ -2,4 +2,6 @@ *Description* -Returns the maximum value from many columns. +Returns the maximum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. + +NOTE: When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`. diff --git a/docs/reference/esql/functions/description/least.asciidoc b/docs/reference/esql/functions/description/least.asciidoc index 2aeb1f85aa51..c5daf0bc79ae 100644 --- a/docs/reference/esql/functions/description/least.asciidoc +++ b/docs/reference/esql/functions/description/least.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns the minimum value from many columns. +Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. diff --git a/docs/reference/esql/functions/description/st_contains.asciidoc b/docs/reference/esql/functions/description/st_contains.asciidoc index 678fde7f5d98..a2c81b9d24a1 100644 --- a/docs/reference/esql/functions/description/st_contains.asciidoc +++ b/docs/reference/esql/functions/description/st_contains.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the first geometry contains the second geometry. +Returns whether the first geometry contains the second geometry. This is the inverse of the <> function. 
diff --git a/docs/reference/esql/functions/description/st_disjoint.asciidoc b/docs/reference/esql/functions/description/st_disjoint.asciidoc index 95ab02a39614..461dd61daef7 100644 --- a/docs/reference/esql/functions/description/st_disjoint.asciidoc +++ b/docs/reference/esql/functions/description/st_disjoint.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the two geometries or geometry columns are disjoint. +Returns whether the two geometries or geometry columns are disjoint. This is the inverse of the <> function. In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ diff --git a/docs/reference/esql/functions/description/st_intersects.asciidoc b/docs/reference/esql/functions/description/st_intersects.asciidoc index b736ba29a6c8..48fd7bdb2f33 100644 --- a/docs/reference/esql/functions/description/st_intersects.asciidoc +++ b/docs/reference/esql/functions/description/st_intersects.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the two geometries or geometry columns intersect. +Returns true if two geometries intersect. They intersect if they have any point in common, including their interior points (points along lines or within polygons). This is the inverse of the <> function. In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ diff --git a/docs/reference/esql/functions/description/st_within.asciidoc b/docs/reference/esql/functions/description/st_within.asciidoc index 890f28cb769b..38a34f518234 100644 --- a/docs/reference/esql/functions/description/st_within.asciidoc +++ b/docs/reference/esql/functions/description/st_within.asciidoc @@ -2,4 +2,4 @@ *Description* -Returns whether the first geometry is within the second geometry. +Returns whether the first geometry is within the second geometry. This is the inverse of the <> function. 
diff --git a/docs/reference/esql/functions/description/st_x.asciidoc b/docs/reference/esql/functions/description/st_x.asciidoc index beb077bea332..33d867f86242 100644 --- a/docs/reference/esql/functions/description/st_x.asciidoc +++ b/docs/reference/esql/functions/description/st_x.asciidoc @@ -2,4 +2,4 @@ *Description* -Extracts the x-coordinate from a point geometry. +Extracts the `x` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. diff --git a/docs/reference/esql/functions/description/st_y.asciidoc b/docs/reference/esql/functions/description/st_y.asciidoc index 19c371d2ef93..b03956a51e1a 100644 --- a/docs/reference/esql/functions/description/st_y.asciidoc +++ b/docs/reference/esql/functions/description/st_y.asciidoc @@ -2,4 +2,4 @@ *Description* -Extracts the y-coordinate from a point geometry. +Extracts the `y` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. diff --git a/docs/reference/esql/functions/examples/case.asciidoc b/docs/reference/esql/functions/examples/case.asciidoc new file mode 100644 index 000000000000..c5c766512ce0 --- /dev/null +++ b/docs/reference/esql/functions/examples/case.asciidoc @@ -0,0 +1,32 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Examples* + +Determine whether employees are monolingual, bilingual, or polyglot: +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=case] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=case-result] +|=== +Calculate the total connection success rate based on log messages: +[source.merge.styled,esql] +---- +include::{esql-specs}/conditional.csv-spec[tag=docsCaseSuccessRate] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/conditional.csv-spec[tag=docsCaseSuccessRate-result] +|=== +Calculate an hourly error rate as a percentage of the total number of log messages: +[source.merge.styled,esql] +---- +include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate-result] +|=== + diff --git a/docs/reference/esql/functions/examples/greatest.asciidoc b/docs/reference/esql/functions/examples/greatest.asciidoc new file mode 100644 index 000000000000..bd89ad1b3cdd --- /dev/null +++ b/docs/reference/esql/functions/examples/greatest.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=greatest] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=greatest-result] +|=== + diff --git a/docs/reference/esql/functions/examples/least.asciidoc b/docs/reference/esql/functions/examples/least.asciidoc new file mode 100644 index 000000000000..67fc5260f639 --- /dev/null +++ b/docs/reference/esql/functions/examples/least.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. 
+ +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=least] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=least-result] +|=== + diff --git a/docs/reference/esql/functions/examples/st_x.asciidoc b/docs/reference/esql/functions/examples/st_x.asciidoc new file mode 100644 index 000000000000..895e76c6c04e --- /dev/null +++ b/docs/reference/esql/functions/examples/st_x.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_x_y] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] +|=== + diff --git a/docs/reference/esql/functions/examples/st_y.asciidoc b/docs/reference/esql/functions/examples/st_y.asciidoc new file mode 100644 index 000000000000..895e76c6c04e --- /dev/null +++ b/docs/reference/esql/functions/examples/st_y.asciidoc @@ -0,0 +1,13 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/spatial.csv-spec[tag=st_x_y] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] +|=== + diff --git a/docs/reference/esql/functions/greatest.asciidoc b/docs/reference/esql/functions/greatest.asciidoc deleted file mode 100644 index 003f1f46e6db..000000000000 --- a/docs/reference/esql/functions/greatest.asciidoc +++ /dev/null @@ -1,38 +0,0 @@ -[discrete] -[[esql-greatest]] -=== `GREATEST` - -*Syntax* - -[.text-center] -image::esql/functions/signature/greatest.svg[Embedded,opts=inline] - -*Parameters* - -`first`:: -First of the columns to evaluate. - -`rest`:: -The rest of the columns to evaluate. 
- -*Description* - -Returns the maximum value from multiple columns. This is similar to <> -except it is intended to run on multiple columns at once. - -NOTE: When run on `keyword` or `text` fields, this returns the last string - in alphabetical order. When run on `boolean` columns this will return - `true` if any values are `true`. - -include::types/greatest.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=greatest] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=greatest-result] -|=== diff --git a/docs/reference/esql/functions/kibana/definition/case.json b/docs/reference/esql/functions/kibana/definition/case.json index 73bc215ac6ad..5959eed62d37 100644 --- a/docs/reference/esql/functions/kibana/definition/case.json +++ b/docs/reference/esql/functions/kibana/definition/case.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "case", - "description" : "Accepts pairs of conditions and values.\nThe function returns the value that belongs to the first condition that evaluates to true.", + "description" : "Accepts pairs of conditions and values. The function returns the value that\nbelongs to the first condition that evaluates to `true`.\n\nIf the number of arguments is odd, the last argument is the default value which\nis returned when no condition matches. If the number of arguments is even, and\nno condition matches, the function returns `null`.", "signatures" : [ { "params" : [ @@ -10,23 +10,226 @@ "name" : "condition", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "boolean", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. 
The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "cartesian_point", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "datetime", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "datetime" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "double", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "double" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "geo_point", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." 
+ } + ], + "variadic" : true, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "integer", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "integer" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "ip", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "ip" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." }, { "name" : "trueValue", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." }, { "name" : "falseValue", "type" : "keyword", "optional" : true, - "description" : "" + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." } ], "variadic" : true, "returnType" : "keyword" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "long", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." 
+ } + ], + "variadic" : true, + "returnType" : "long" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "text", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "text" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "unsigned_long", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." + } + ], + "variadic" : true, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "condition", + "type" : "boolean", + "optional" : false, + "description" : "A condition." + }, + { + "name" : "trueValue", + "type" : "version", + "optional" : false, + "description" : "The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches." 
+ } + ], + "variadic" : true, + "returnType" : "version" } + ], + "examples" : [ + "FROM employees\n| EVAL type = CASE(\n languages <= 1, \"monolingual\",\n languages <= 2, \"bilingual\",\n \"polyglot\")\n| KEEP emp_no, languages, type", + "FROM sample_data\n| EVAL successful = CASE(\n STARTS_WITH(message, \"Connected to\"), 1,\n message == \"Connection error\", 0\n )\n| STATS success_rate = AVG(successful)", + "FROM sample_data\n| EVAL error = CASE(message LIKE \"*error*\", 1, 0)\n| EVAL hour = DATE_TRUNC(1 hour, @timestamp)\n| STATS error_rate = AVG(error) by hour\n| SORT hour" ] } diff --git a/docs/reference/esql/functions/kibana/definition/coalesce.json b/docs/reference/esql/functions/kibana/definition/coalesce.json index 87feead06d09..1081b4283957 100644 --- a/docs/reference/esql/functions/kibana/definition/coalesce.json +++ b/docs/reference/esql/functions/kibana/definition/coalesce.json @@ -10,7 +10,7 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -22,13 +22,13 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "boolean", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, @@ -40,7 +40,7 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -52,13 +52,13 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "integer", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." 
} ], "variadic" : true, @@ -70,7 +70,7 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -82,13 +82,13 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "keyword", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, @@ -100,7 +100,7 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -112,13 +112,13 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "long", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." } ], "variadic" : true, @@ -130,7 +130,7 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." } ], "variadic" : true, @@ -142,13 +142,13 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "Expression to evaluate" + "description" : "Expression to evaluate." }, { "name" : "rest", "type" : "text", "optional" : true, - "description" : "Other expression to evaluate" + "description" : "Other expression to evaluate." 
} ], "variadic" : true, diff --git a/docs/reference/esql/functions/kibana/definition/greatest.json b/docs/reference/esql/functions/kibana/definition/greatest.json index f72f54708c6b..15c9f58d32d3 100644 --- a/docs/reference/esql/functions/kibana/definition/greatest.json +++ b/docs/reference/esql/functions/kibana/definition/greatest.json @@ -2,7 +2,8 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "greatest", - "description" : "Returns the maximum value from many columns.", + "description" : "Returns the maximum value from multiple columns. This is similar to <>\nexcept it is intended to run on multiple columns at once.", + "note" : "When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`.", "signatures" : [ { "params" : [ @@ -10,7 +11,7 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -22,13 +23,13 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "boolean", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -40,13 +41,13 @@ "name" : "first", "type" : "double", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "double", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -58,7 +59,7 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." 
} ], "variadic" : true, @@ -70,13 +71,13 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "integer", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -88,13 +89,13 @@ "name" : "first", "type" : "ip", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "ip", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -106,7 +107,7 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -118,13 +119,13 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "keyword", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -136,7 +137,7 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -148,13 +149,13 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "long", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -166,7 +167,7 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -178,13 +179,13 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." 
}, { "name" : "rest", "type" : "text", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -196,17 +197,20 @@ "name" : "first", "type" : "version", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "version", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, "returnType" : "version" } + ], + "examples" : [ + "ROW a = 10, b = 20\n| EVAL g = GREATEST(a, b)" ] } diff --git a/docs/reference/esql/functions/kibana/definition/least.json b/docs/reference/esql/functions/kibana/definition/least.json index 66efedc0c9fe..0b922ad6ad3c 100644 --- a/docs/reference/esql/functions/kibana/definition/least.json +++ b/docs/reference/esql/functions/kibana/definition/least.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "least", - "description" : "Returns the minimum value from many columns.", + "description" : "Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once.", "signatures" : [ { "params" : [ @@ -10,7 +10,7 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -22,13 +22,13 @@ "name" : "first", "type" : "boolean", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "boolean", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -40,13 +40,13 @@ "name" : "first", "type" : "double", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." 
}, { "name" : "rest", "type" : "double", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -58,7 +58,7 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -70,13 +70,13 @@ "name" : "first", "type" : "integer", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "integer", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -88,13 +88,13 @@ "name" : "first", "type" : "ip", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "ip", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -106,7 +106,7 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -118,13 +118,13 @@ "name" : "first", "type" : "keyword", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "keyword", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -136,7 +136,7 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -148,13 +148,13 @@ "name" : "first", "type" : "long", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "long", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." 
} ], "variadic" : true, @@ -166,7 +166,7 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." } ], "variadic" : true, @@ -178,13 +178,13 @@ "name" : "first", "type" : "text", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "text", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, @@ -196,17 +196,20 @@ "name" : "first", "type" : "version", "optional" : false, - "description" : "" + "description" : "First of the columns to evaluate." }, { "name" : "rest", "type" : "version", "optional" : true, - "description" : "" + "description" : "The rest of the columns to evaluate." } ], "variadic" : true, "returnType" : "version" } + ], + "examples" : [ + "ROW a = 10, b = 20\n| EVAL l = LEAST(a, b)" ] } diff --git a/docs/reference/esql/functions/kibana/definition/st_contains.json b/docs/reference/esql/functions/kibana/definition/st_contains.json index f4f800391790..1ef76e46f371 100644 --- a/docs/reference/esql/functions/kibana/definition/st_contains.json +++ b/docs/reference/esql/functions/kibana/definition/st_contains.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_contains", - "description" : "Returns whether the first geometry contains the second geometry.", + "description" : "Returns whether the first geometry contains the second geometry.\nThis is the inverse of the <> function.", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_disjoint.json b/docs/reference/esql/functions/kibana/definition/st_disjoint.json index 98647b63ff18..e408a0f98fe6 100644 --- a/docs/reference/esql/functions/kibana/definition/st_disjoint.json +++ b/docs/reference/esql/functions/kibana/definition/st_disjoint.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_disjoint", - "description" : "Returns whether the two geometries or geometry columns are disjoint.", + "description" : "Returns whether the two geometries or geometry columns are disjoint.\nThis is the inverse of the <> function.\nIn mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_intersects.json b/docs/reference/esql/functions/kibana/definition/st_intersects.json index ba619fe57ecf..2f9f255ab187 100644 --- a/docs/reference/esql/functions/kibana/definition/st_intersects.json +++ b/docs/reference/esql/functions/kibana/definition/st_intersects.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_intersects", - "description" : "Returns whether the two geometries or geometry columns intersect.", + "description" : "Returns true if two geometries intersect.\nThey intersect if they have any point in common, including their interior points\n(points along lines or within polygons).\nThis is the inverse of the <> function.\nIn mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. 
This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_within.json b/docs/reference/esql/functions/kibana/definition/st_within.json index ee98337441ab..e0cdf62fe0f9 100644 --- a/docs/reference/esql/functions/kibana/definition/st_within.json +++ b/docs/reference/esql/functions/kibana/definition/st_within.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_within", - "description" : "Returns whether the first geometry is within the second geometry.", + "description" : "Returns whether the first geometry is within the second geometry.\nThis is the inverse of the <> function.", "signatures" : [ { "params" : [ @@ -10,13 +10,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -28,13 +28,13 @@ "name" : "geomA", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -46,13 +46,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, @@ -64,13 +64,13 @@ "name" : "geomA", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "cartesian_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -82,13 +82,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -100,13 +100,13 @@ "name" : "geomA", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." 
}, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -118,13 +118,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_point", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." } ], "variadic" : false, @@ -136,13 +136,13 @@ "name" : "geomA", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`." }, { "name" : "geomB", "type" : "geo_shape", "optional" : false, - "description" : "Geometry column name or variable of geometry type" + "description" : "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters." 
} ], "variadic" : false, diff --git a/docs/reference/esql/functions/kibana/definition/st_x.json b/docs/reference/esql/functions/kibana/definition/st_x.json index 57598b3470e1..c3554a2ee808 100644 --- a/docs/reference/esql/functions/kibana/definition/st_x.json +++ b/docs/reference/esql/functions/kibana/definition/st_x.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_x", - "description" : "Extracts the x-coordinate from a point geometry.", + "description" : "Extracts the `x` coordinate from the supplied point.\nIf the points is of type `geo_point` this is equivalent to extracting the `longitude` value.", "signatures" : [ { "params" : [ @@ -10,7 +10,7 @@ "name" : "point", "type" : "cartesian_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, @@ -22,11 +22,14 @@ "name" : "point", "type" : "geo_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, "returnType" : "double" } + ], + "examples" : [ + "ROW point = TO_GEOPOINT(\"POINT(42.97109629958868 14.7552534006536)\")\n| EVAL x = ST_X(point), y = ST_Y(point)" ] } diff --git a/docs/reference/esql/functions/kibana/definition/st_y.json b/docs/reference/esql/functions/kibana/definition/st_y.json index 0dacaa56bb8d..2966ae04f75e 100644 --- a/docs/reference/esql/functions/kibana/definition/st_y.json +++ b/docs/reference/esql/functions/kibana/definition/st_y.json @@ -2,7 +2,7 @@ "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../README.md for how to regenerate it.", "type" : "eval", "name" : "st_y", - "description" : "Extracts the y-coordinate from a point geometry.", + "description" : "Extracts the `y` coordinate from the supplied point.\nIf the points is of type `geo_point` this is equivalent to extracting the `latitude` value.", "signatures" : [ { "params" : [ @@ -10,7 +10,7 @@ "name" : "point", "type" : "cartesian_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, @@ -22,11 +22,14 @@ "name" : "point", "type" : "geo_point", "optional" : false, - "description" : "" + "description" : "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." } ], "variadic" : false, "returnType" : "double" } + ], + "examples" : [ + "ROW point = TO_GEOPOINT(\"POINT(42.97109629958868 14.7552534006536)\")\n| EVAL x = ST_X(point), y = ST_Y(point)" ] } diff --git a/docs/reference/esql/functions/kibana/docs/case.md b/docs/reference/esql/functions/kibana/docs/case.md index e1494a5c2af8..8bb31ee97275 100644 --- a/docs/reference/esql/functions/kibana/docs/case.md +++ b/docs/reference/esql/functions/kibana/docs/case.md @@ -3,6 +3,18 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### CASE -Accepts pairs of conditions and values. -The function returns the value that belongs to the first condition that evaluates to true. +Accepts pairs of conditions and values. The function returns the value that +belongs to the first condition that evaluates to `true`. +If the number of arguments is odd, the last argument is the default value which +is returned when no condition matches. If the number of arguments is even, and +no condition matches, the function returns `null`. 
+ +``` +FROM employees +| EVAL type = CASE( + languages <= 1, "monolingual", + languages <= 2, "bilingual", + "polyglot") +| KEEP emp_no, languages, type +``` diff --git a/docs/reference/esql/functions/kibana/docs/greatest.md b/docs/reference/esql/functions/kibana/docs/greatest.md index 3db0c9ed87aa..4b3b4027381f 100644 --- a/docs/reference/esql/functions/kibana/docs/greatest.md +++ b/docs/reference/esql/functions/kibana/docs/greatest.md @@ -3,5 +3,11 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### GREATEST -Returns the maximum value from many columns. +Returns the maximum value from multiple columns. This is similar to <> +except it is intended to run on multiple columns at once. +``` +ROW a = 10, b = 20 +| EVAL g = GREATEST(a, b) +``` +Note: When run on `keyword` or `text` fields, this returns the last string in alphabetical order. When run on `boolean` columns this will return `true` if any values are `true`. diff --git a/docs/reference/esql/functions/kibana/docs/least.md b/docs/reference/esql/functions/kibana/docs/least.md index ff2c19592c8e..7bbbcf79bc37 100644 --- a/docs/reference/esql/functions/kibana/docs/least.md +++ b/docs/reference/esql/functions/kibana/docs/least.md @@ -3,5 +3,9 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### LEAST -Returns the minimum value from many columns. +Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. +``` +ROW a = 10, b = 20 +| EVAL l = LEAST(a, b) +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_contains.md b/docs/reference/esql/functions/kibana/docs/st_contains.md index 6e23bb9b0f11..99f3a19f9df4 100644 --- a/docs/reference/esql/functions/kibana/docs/st_contains.md +++ b/docs/reference/esql/functions/kibana/docs/st_contains.md @@ -4,6 +4,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../READ ### ST_CONTAINS Returns whether the first geometry contains the second geometry. +This is the inverse of the <> function. ``` FROM airport_city_boundaries diff --git a/docs/reference/esql/functions/kibana/docs/st_disjoint.md b/docs/reference/esql/functions/kibana/docs/st_disjoint.md index 7cf66b168bd7..4b42954efa5c 100644 --- a/docs/reference/esql/functions/kibana/docs/st_disjoint.md +++ b/docs/reference/esql/functions/kibana/docs/st_disjoint.md @@ -4,6 +4,8 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ ### ST_DISJOINT Returns whether the two geometries or geometry columns are disjoint. +This is the inverse of the <> function. +In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ ``` FROM airport_city_boundaries diff --git a/docs/reference/esql/functions/kibana/docs/st_intersects.md b/docs/reference/esql/functions/kibana/docs/st_intersects.md index e4db33429dbe..b0a58b3ab235 100644 --- a/docs/reference/esql/functions/kibana/docs/st_intersects.md +++ b/docs/reference/esql/functions/kibana/docs/st_intersects.md @@ -3,7 +3,11 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ST_INTERSECTS -Returns whether the two geometries or geometry columns intersect. +Returns true if two geometries intersect. +They intersect if they have any point in common, including their interior points +(points along lines or within polygons). +This is the inverse of the <> function. +In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ ``` FROM airports diff --git a/docs/reference/esql/functions/kibana/docs/st_within.md b/docs/reference/esql/functions/kibana/docs/st_within.md index cbb3ae5ee9ac..9ef046e5006f 100644 --- a/docs/reference/esql/functions/kibana/docs/st_within.md +++ b/docs/reference/esql/functions/kibana/docs/st_within.md @@ -4,6 +4,7 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. 
See ../READ ### ST_WITHIN Returns whether the first geometry is within the second geometry. +This is the inverse of the <> function. ``` FROM airport_city_boundaries diff --git a/docs/reference/esql/functions/kibana/docs/st_x.md b/docs/reference/esql/functions/kibana/docs/st_x.md index af2f4de1487c..b113f19e1c76 100644 --- a/docs/reference/esql/functions/kibana/docs/st_x.md +++ b/docs/reference/esql/functions/kibana/docs/st_x.md @@ -3,5 +3,10 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ST_X -Extracts the x-coordinate from a point geometry. +Extracts the `x` coordinate from the supplied point. +If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. +``` +ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") +| EVAL x = ST_X(point), y = ST_Y(point) +``` diff --git a/docs/reference/esql/functions/kibana/docs/st_y.md b/docs/reference/esql/functions/kibana/docs/st_y.md index 575a5bd3c7d3..db88c3ada63b 100644 --- a/docs/reference/esql/functions/kibana/docs/st_y.md +++ b/docs/reference/esql/functions/kibana/docs/st_y.md @@ -3,5 +3,10 @@ This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../READ --> ### ST_Y -Extracts the y-coordinate from a point geometry. +Extracts the `y` coordinate from the supplied point. +If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. 
+``` +ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") +| EVAL x = ST_X(point), y = ST_Y(point) +``` diff --git a/docs/reference/esql/functions/layout/case.asciidoc b/docs/reference/esql/functions/layout/case.asciidoc index 192e74522b8d..edfc768dc705 100644 --- a/docs/reference/esql/functions/layout/case.asciidoc +++ b/docs/reference/esql/functions/layout/case.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/case.svg[Embedded,opts=inline] include::../parameters/case.asciidoc[] include::../description/case.asciidoc[] include::../types/case.asciidoc[] +include::../examples/case.asciidoc[] diff --git a/docs/reference/esql/functions/layout/greatest.asciidoc b/docs/reference/esql/functions/layout/greatest.asciidoc index 1ff17f3c3adf..fff9a3241294 100644 --- a/docs/reference/esql/functions/layout/greatest.asciidoc +++ b/docs/reference/esql/functions/layout/greatest.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/greatest.svg[Embedded,opts=inline] include::../parameters/greatest.asciidoc[] include::../description/greatest.asciidoc[] include::../types/greatest.asciidoc[] +include::../examples/greatest.asciidoc[] diff --git a/docs/reference/esql/functions/layout/least.asciidoc b/docs/reference/esql/functions/layout/least.asciidoc index a14a166c8bfe..0daee9c181a6 100644 --- a/docs/reference/esql/functions/layout/least.asciidoc +++ b/docs/reference/esql/functions/layout/least.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/least.svg[Embedded,opts=inline] include::../parameters/least.asciidoc[] include::../description/least.asciidoc[] include::../types/least.asciidoc[] +include::../examples/least.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_x.asciidoc b/docs/reference/esql/functions/layout/st_x.asciidoc index ce3824aa157b..2c2dc191a31a 100644 --- a/docs/reference/esql/functions/layout/st_x.asciidoc +++ b/docs/reference/esql/functions/layout/st_x.asciidoc @@ -12,3 +12,4 @@ 
image::esql/functions/signature/st_x.svg[Embedded,opts=inline] include::../parameters/st_x.asciidoc[] include::../description/st_x.asciidoc[] include::../types/st_x.asciidoc[] +include::../examples/st_x.asciidoc[] diff --git a/docs/reference/esql/functions/layout/st_y.asciidoc b/docs/reference/esql/functions/layout/st_y.asciidoc index 702e9097ae68..0708465760bb 100644 --- a/docs/reference/esql/functions/layout/st_y.asciidoc +++ b/docs/reference/esql/functions/layout/st_y.asciidoc @@ -12,3 +12,4 @@ image::esql/functions/signature/st_y.svg[Embedded,opts=inline] include::../parameters/st_y.asciidoc[] include::../description/st_y.asciidoc[] include::../types/st_y.asciidoc[] +include::../examples/st_y.asciidoc[] diff --git a/docs/reference/esql/functions/least.asciidoc b/docs/reference/esql/functions/least.asciidoc deleted file mode 100644 index 2860eb31090c..000000000000 --- a/docs/reference/esql/functions/least.asciidoc +++ /dev/null @@ -1,38 +0,0 @@ -[discrete] -[[esql-least]] -=== `LEAST` - -*Syntax* - -[.text-center] -image::esql/functions/signature/least.svg[Embedded,opts=inline] - -*Parameters* - -`first`:: -First of the columns to evaluate. - -`rest`:: -The rest of the columns to evaluate. - -*Description* - -Returns the minimum value from multiple columns. This is similar to -<> except it is intended to run on multiple columns at once. - -NOTE: When run on `keyword` or `text` fields, this returns the first string - in alphabetical order. When run on `boolean` columns this will return - `false` if any values are `false`. 
- -include::types/least.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/math.csv-spec[tag=least] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/math.csv-spec[tag=least-result] -|=== diff --git a/docs/reference/esql/functions/parameters/case.asciidoc b/docs/reference/esql/functions/parameters/case.asciidoc index c3617b7c0e32..ee6f7e499b3b 100644 --- a/docs/reference/esql/functions/parameters/case.asciidoc +++ b/docs/reference/esql/functions/parameters/case.asciidoc @@ -3,7 +3,7 @@ *Parameters* `condition`:: - +A condition. `trueValue`:: - +The value that's returned when the corresponding condition is the first to evaluate to `true`. The default value is returned when no condition matches. diff --git a/docs/reference/esql/functions/parameters/coalesce.asciidoc b/docs/reference/esql/functions/parameters/coalesce.asciidoc index 9b62a2e7e0d8..e0860c5bc303 100644 --- a/docs/reference/esql/functions/parameters/coalesce.asciidoc +++ b/docs/reference/esql/functions/parameters/coalesce.asciidoc @@ -3,7 +3,7 @@ *Parameters* `first`:: -Expression to evaluate +Expression to evaluate. `rest`:: -Other expression to evaluate +Other expression to evaluate. diff --git a/docs/reference/esql/functions/parameters/greatest.asciidoc b/docs/reference/esql/functions/parameters/greatest.asciidoc index 83ac29d0bf7c..8d23101aba7f 100644 --- a/docs/reference/esql/functions/parameters/greatest.asciidoc +++ b/docs/reference/esql/functions/parameters/greatest.asciidoc @@ -3,7 +3,7 @@ *Parameters* `first`:: - +First of the columns to evaluate. `rest`:: - +The rest of the columns to evaluate. 
diff --git a/docs/reference/esql/functions/parameters/least.asciidoc b/docs/reference/esql/functions/parameters/least.asciidoc index 83ac29d0bf7c..8d23101aba7f 100644 --- a/docs/reference/esql/functions/parameters/least.asciidoc +++ b/docs/reference/esql/functions/parameters/least.asciidoc @@ -3,7 +3,7 @@ *Parameters* `first`:: - +First of the columns to evaluate. `rest`:: - +The rest of the columns to evaluate. diff --git a/docs/reference/esql/functions/parameters/st_contains.asciidoc b/docs/reference/esql/functions/parameters/st_contains.asciidoc index e87a0d0eb94f..2f969f0f3cf0 100644 --- a/docs/reference/esql/functions/parameters/st_contains.asciidoc +++ b/docs/reference/esql/functions/parameters/st_contains.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/parameters/st_disjoint.asciidoc b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc index e87a0d0eb94f..2f969f0f3cf0 100644 --- a/docs/reference/esql/functions/parameters/st_disjoint.asciidoc +++ b/docs/reference/esql/functions/parameters/st_disjoint.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. 
If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/parameters/st_intersects.asciidoc b/docs/reference/esql/functions/parameters/st_intersects.asciidoc index e87a0d0eb94f..2f969f0f3cf0 100644 --- a/docs/reference/esql/functions/parameters/st_intersects.asciidoc +++ b/docs/reference/esql/functions/parameters/st_intersects.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. diff --git a/docs/reference/esql/functions/parameters/st_within.asciidoc b/docs/reference/esql/functions/parameters/st_within.asciidoc index e87a0d0eb94f..2f969f0f3cf0 100644 --- a/docs/reference/esql/functions/parameters/st_within.asciidoc +++ b/docs/reference/esql/functions/parameters/st_within.asciidoc @@ -3,7 +3,7 @@ *Parameters* `geomA`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. `geomB`:: -Geometry column name or variable of geometry type +Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters. 
diff --git a/docs/reference/esql/functions/parameters/st_x.asciidoc b/docs/reference/esql/functions/parameters/st_x.asciidoc index 4e8e77dea1f8..b66bfc286a44 100644 --- a/docs/reference/esql/functions/parameters/st_x.asciidoc +++ b/docs/reference/esql/functions/parameters/st_x.asciidoc @@ -3,4 +3,4 @@ *Parameters* `point`:: - +Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/parameters/st_y.asciidoc b/docs/reference/esql/functions/parameters/st_y.asciidoc index 4e8e77dea1f8..b66bfc286a44 100644 --- a/docs/reference/esql/functions/parameters/st_y.asciidoc +++ b/docs/reference/esql/functions/parameters/st_y.asciidoc @@ -3,4 +3,4 @@ *Parameters* `point`:: - +Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. diff --git a/docs/reference/esql/functions/spatial-functions.asciidoc b/docs/reference/esql/functions/spatial-functions.asciidoc index b6d178ddd624..d143681fcf2f 100644 --- a/docs/reference/esql/functions/spatial-functions.asciidoc +++ b/docs/reference/esql/functions/spatial-functions.asciidoc @@ -16,9 +16,9 @@ * experimental:[] <> // end::spatial_list[] -include::st_intersects.asciidoc[] -include::st_disjoint.asciidoc[] -include::st_contains.asciidoc[] -include::st_within.asciidoc[] -include::st_x.asciidoc[] -include::st_y.asciidoc[] +include::layout/st_intersects.asciidoc[] +include::layout/st_disjoint.asciidoc[] +include::layout/st_contains.asciidoc[] +include::layout/st_within.asciidoc[] +include::layout/st_x.asciidoc[] +include::layout/st_y.asciidoc[] diff --git a/docs/reference/esql/functions/st_contains.asciidoc b/docs/reference/esql/functions/st_contains.asciidoc deleted file mode 100644 index 110c4fe4ca9e..000000000000 --- a/docs/reference/esql/functions/st_contains.asciidoc +++ /dev/null @@ -1,26 +0,0 @@ -[discrete] -[[esql-st_contains]] -=== `ST_CONTAINS` - -experimental::[] - -*Syntax* - -[.text-center] 
-image::esql/functions/signature/st_contains.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. - -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -include::description/st_contains.asciidoc[] -This is the inverse of the <> function. - -include::types/st_contains.asciidoc[] -include::examples/st_contains.asciidoc[] diff --git a/docs/reference/esql/functions/st_disjoint.asciidoc b/docs/reference/esql/functions/st_disjoint.asciidoc deleted file mode 100644 index db89ca186a0f..000000000000 --- a/docs/reference/esql/functions/st_disjoint.asciidoc +++ /dev/null @@ -1,27 +0,0 @@ -[discrete] -[[esql-st_disjoint]] -=== `ST_DISJOINT` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_disjoint.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. - -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -include::description/st_disjoint.asciidoc[] -This is the inverse of the <> function. 
-In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ - -include::types/st_disjoint.asciidoc[] -include::examples/st_disjoint.asciidoc[] diff --git a/docs/reference/esql/functions/st_intersects.asciidoc b/docs/reference/esql/functions/st_intersects.asciidoc deleted file mode 100644 index d75a7f3a50e0..000000000000 --- a/docs/reference/esql/functions/st_intersects.asciidoc +++ /dev/null @@ -1,31 +0,0 @@ -[discrete] -[[esql-st_intersects]] -=== `ST_INTERSECTS` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_intersects.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. - -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -*Description* - -Returns true if two geometries intersect. -They intersect if they have any point in common, including their interior points -(points along lines or within polygons). -This is the inverse of the <> function. -In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ - -include::types/st_intersects.asciidoc[] -include::examples/st_intersects.asciidoc[] diff --git a/docs/reference/esql/functions/st_within.asciidoc b/docs/reference/esql/functions/st_within.asciidoc deleted file mode 100644 index 0f0190a9de63..000000000000 --- a/docs/reference/esql/functions/st_within.asciidoc +++ /dev/null @@ -1,26 +0,0 @@ -[discrete] -[[esql-st_within]] -=== `ST_WITHIN` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_within.svg[Embedded,opts=inline] - -*Parameters* - -`geomA`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. 
- -`geomB`:: -Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. If `null`, the function returns `null`. -The second parameter must also have the same coordinate system as the first. -This means it is not possible to combine `geo_*` and `cartesian_*` parameters. - -include::description/st_within.asciidoc[] -This is the inverse of the <> function. - -include::types/st_within.asciidoc[] -include::examples/st_within.asciidoc[] diff --git a/docs/reference/esql/functions/st_x.asciidoc b/docs/reference/esql/functions/st_x.asciidoc deleted file mode 100644 index eec48894b515..000000000000 --- a/docs/reference/esql/functions/st_x.asciidoc +++ /dev/null @@ -1,33 +0,0 @@ -[discrete] -[[esql-st_x]] -=== `ST_X` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_x.svg[Embedded,opts=inline] - -*Parameters* - -`point`:: -Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. - -*Description* - -Extracts the `x` coordinate from the supplied point. -If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. - -include::types/st_x.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/spatial.csv-spec[tag=st_x_y] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] -|=== diff --git a/docs/reference/esql/functions/st_y.asciidoc b/docs/reference/esql/functions/st_y.asciidoc deleted file mode 100644 index 8fc7281e395d..000000000000 --- a/docs/reference/esql/functions/st_y.asciidoc +++ /dev/null @@ -1,33 +0,0 @@ -[discrete] -[[esql-st_y]] -=== `ST_Y` - -experimental::[] - -*Syntax* - -[.text-center] -image::esql/functions/signature/st_y.svg[Embedded,opts=inline] - -*Parameters* - -`point`:: -Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. 
- -*Description* - -Extracts the `y` coordinate from the supplied point. -If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. - -include::types/st_y.asciidoc[] - -*Example* - -[source.merge.styled,esql] ----- -include::{esql-specs}/spatial.csv-spec[tag=st_x_y] ----- -[%header.monospaced.styled,format=dsv,separator=|] -|=== -include::{esql-specs}/spatial.csv-spec[tag=st_x_y-result] -|=== diff --git a/docs/reference/esql/functions/types/case.asciidoc b/docs/reference/esql/functions/types/case.asciidoc index e7d627ab915a..85e4193b5bf2 100644 --- a/docs/reference/esql/functions/types/case.asciidoc +++ b/docs/reference/esql/functions/types/case.asciidoc @@ -5,5 +5,15 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== condition | trueValue | result -keyword +boolean | boolean | boolean +boolean | cartesian_point | cartesian_point +boolean | datetime | datetime +boolean | double | double +boolean | geo_point | geo_point +boolean | integer | integer +boolean | ip | ip +boolean | long | long +boolean | text | text +boolean | unsigned_long | unsigned_long +boolean | version | version |=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec index 1d523640731d..bd52d3b26b33 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/meta.csv-spec @@ -28,8 +28,8 @@ double e() "boolean ends_with(str:keyword|text, suffix:keyword|text)" "double|integer|long|unsigned_long floor(number:double|integer|long|unsigned_long)" "keyword from_base64(string:keyword|text)" -"integer|long|double|boolean|keyword|text|ip|version greatest(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" -"integer|long|double|boolean|keyword|text|ip|version 
least(first:integer|long|double|boolean|keyword|text|ip|version, ?rest...:integer|long|double|boolean|keyword|text|ip|version)" +"boolean|double|integer|ip|keyword|long|text|version greatest(first:boolean|double|integer|ip|keyword|long|text|version, ?rest...:boolean|double|integer|ip|keyword|long|text|version)" +"boolean|double|integer|ip|keyword|long|text|version least(first:boolean|double|integer|ip|keyword|long|text|version, ?rest...:boolean|double|integer|ip|keyword|long|text|version)" "keyword left(string:keyword|text, length:integer)" "integer length(string:keyword|text)" "integer locate(string:keyword|text, substring:keyword|text, ?start:integer)" @@ -123,10 +123,10 @@ atan2 |[y_coordinate, x_coordinate] |["double|integer|long|unsign avg |number |"double|integer|long" |[""] bin |[field, buckets, from, to] |["integer|long|double|date", "integer|double|date_period|time_duration", "integer|long|double|date", "integer|long|double|date"] |[Numeric or date expression from which to derive buckets., Target number of buckets., Start of the range. Can be a number or a date expressed as a string., End of the range. Can be a number or a date expressed as a string.] bucket |[field, buckets, from, to] |["integer|long|double|date", "integer|double|date_period|time_duration", "integer|long|double|date", "integer|long|double|date"] |[Numeric or date expression from which to derive buckets., Target number of buckets., Start of the range. Can be a number or a date expressed as a string., End of the range. Can be a number or a date expressed as a string.] -case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |["", ""] +case |[condition, trueValue] |[boolean, "boolean|cartesian_point|date|double|geo_point|integer|ip|keyword|long|text|unsigned_long|version"] |[A condition., The value that's returned when the corresponding condition is the first to evaluate to `true`. 
The default value is returned when no condition matches.] ceil |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. cidr_match |[ip, blockX] |[ip, "keyword|text"] |[IP address of type `ip` (both IPv4 and IPv6 are supported)., CIDR block to test the IP against.] -coalesce |first |"boolean|text|integer|keyword|long" |Expression to evaluate +coalesce |first |"boolean|text|integer|keyword|long" |Expression to evaluate. concat |[string1, string2] |["keyword|text", "keyword|text"] |[Strings to concatenate., Strings to concatenate.] cos |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. cosh |angle |"double|integer|long|unsigned_long" |An angle, in radians. If `null`, the function returns `null`. @@ -141,8 +141,8 @@ e |null |null ends_with |[str, suffix] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., String expression. If `null`\, the function returns `null`.] floor |number |"double|integer|long|unsigned_long" |Numeric expression. If `null`, the function returns `null`. from_base64 |string |"keyword|text" |A base64 string. -greatest |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] -least |first |"integer|long|double|boolean|keyword|text|ip|version" |[""] +greatest |first |"boolean|double|integer|ip|keyword|long|text|version" |First of the columns to evaluate. +least |first |"boolean|double|integer|ip|keyword|long|text|version" |First of the columns to evaluate. left |[string, length] |["keyword|text", integer] |[The string from which to return a substring., The number of characters to return.] length |string |"keyword|text" |String expression. If `null`, the function returns `null`. 
locate |[string, substring, start] |["keyword|text", "keyword|text", "integer"] |[An input string, A substring to locate in the input string, The start index] @@ -180,12 +180,12 @@ sinh |angle |"double|integer|long|unsigne split |[string, delim] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., Delimiter. Only single byte delimiters are currently supported.] sqrt |number |"double|integer|long|unsigned_long" |"Numeric expression. If `null`, the function returns `null`." st_centroid_ag|field |"geo_point|cartesian_point" |[""] -st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_disjoint |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_within |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Geometry column name or variable of geometry type, Geometry column name or variable of geometry type] -st_x |point |"geo_point|cartesian_point" |[""] -st_y |point |"geo_point|cartesian_point" |[""] +st_contains |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. 
If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] +st_disjoint |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] +st_intersects |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] +st_within |[geomA, geomB] |["geo_point|cartesian_point|geo_shape|cartesian_shape", "geo_point|cartesian_point|geo_shape|cartesian_shape"] |[Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`., Expression of type `geo_point`\, `cartesian_point`\, `geo_shape` or `cartesian_shape`. If `null`\, the function returns `null`. The second parameter must also have the same coordinate system as the first. This means it is not possible to combine `geo_*` and `cartesian_*` parameters.] 
+st_x |point |"geo_point|cartesian_point" |Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. +st_y |point |"geo_point|cartesian_point" |Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`. starts_with |[str, prefix] |["keyword|text", "keyword|text"] |[String expression. If `null`\, the function returns `null`., String expression. If `null`\, the function returns `null`.] substring |[string, start, length] |["keyword|text", integer, integer] |[String expression. If `null`\, the function returns `null`., Start position., Length of the substring from the start position. Optional; if omitted\, all positions after `start` are returned.] sum |number |"double|integer|long" |[""] @@ -237,7 +237,7 @@ atan2 |The {wikipedia}/Atan2[angle] between the positive x-axis and the avg |The average of a numeric field. bin |Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range. bucket |Creates groups of values - buckets - out of a datetime or numeric input. The size of the buckets can either be provided directly, or chosen based on a recommended count and values range. -case |Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to true. +case |Accepts pairs of conditions and values. The function returns the value that belongs to the first condition that evaluates to `true`. If the number of arguments is odd, the last argument is the default value which is returned when no condition matches. If the number of arguments is even, and no condition matches, the function returns `null`. ceil |Round a number up to the nearest integer. cidr_match |Returns true if the provided IP is contained in one of the provided CIDR blocks. coalesce |Returns the first of its arguments that is not null. 
If all arguments are null, it returns `null`. @@ -255,8 +255,8 @@ e |Returns {wikipedia}/E_(mathematical_constant)[Euler's number]. ends_with |Returns a boolean that indicates whether a keyword string ends with another string. floor |Round a number down to the nearest integer. from_base64 |Decode a base64 string. -greatest |Returns the maximum value from many columns. -least |Returns the minimum value from many columns. +greatest |Returns the maximum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. +least |Returns the minimum value from multiple columns. This is similar to <> except it is intended to run on multiple columns at once. left |Returns the substring that extracts 'length' chars from 'string' starting from the left. length |Returns the character length of a string. locate |Returns an integer that indicates the position of a keyword substring within another string @@ -294,12 +294,12 @@ sinh |Returns the {wikipedia}/Hyperbolic_functions[hyperbolic sine] of split |Split a single valued string into multiple strings. sqrt |Returns the square root of a number. The input can be any numeric value, the return value is always a double. Square roots of negative numbers and infinites are null. st_centroid_ag|The centroid of a spatial field. -st_contains |Returns whether the first geometry contains the second geometry. -st_disjoint |Returns whether the two geometries or geometry columns are disjoint. -st_intersects |Returns whether the two geometries or geometry columns intersect. -st_within |Returns whether the first geometry is within the second geometry. -st_x |Extracts the x-coordinate from a point geometry. -st_y |Extracts the y-coordinate from a point geometry. +st_contains |Returns whether the first geometry contains the second geometry. This is the inverse of the <> function. +st_disjoint |Returns whether the two geometries or geometry columns are disjoint. This is the inverse of the <> function. 
In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅ +st_intersects |Returns true if two geometries intersect. They intersect if they have any point in common, including their interior points (points along lines or within polygons). This is the inverse of the <> function. In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅ +st_within |Returns whether the first geometry is within the second geometry. This is the inverse of the <> function. +st_x |Extracts the `x` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `longitude` value. +st_y |Extracts the `y` coordinate from the supplied point. If the points is of type `geo_point` this is equivalent to extracting the `latitude` value. starts_with |Returns a boolean that indicates whether a keyword string starts with another string. substring |Returns a substring of a string, specified by a start position and an optional length sum |The sum of a numeric field. @@ -370,8 +370,8 @@ e |double ends_with |boolean |[false, false] |false |false floor |"double|integer|long|unsigned_long" |false |false |false from_base64 |keyword |false |false |false -greatest |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false -least |"integer|long|double|boolean|keyword|text|ip|version" |false |true |false +greatest |"boolean|double|integer|ip|keyword|long|text|version" |false |true |false +least |"boolean|double|integer|ip|keyword|long|text|version" |false |true |false left |keyword |[false, false] |false |false length |integer |false |false |false locate |integer |[false, false, true] |false |false diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index f00e69ddaabe..1018a03762cc 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -16,6 +16,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -59,12 +60,28 @@ record Condition(Expression condition, Expression value) {} "unsigned_long", "version" }, description = """ - Accepts pairs of conditions and values. - The function returns the value that belongs to the first condition that evaluates to true.""" + Accepts pairs of conditions and values. The function returns the value that + belongs to the first condition that evaluates to `true`. + + If the number of arguments is odd, the last argument is the default value which + is returned when no condition matches. 
If the number of arguments is even, and + no condition matches, the function returns `null`.""", + examples = { + @Example(description = "Determine whether employees are monolingual, bilingual, or polyglot:", file = "docs", tag = "case"), + @Example( + description = "Calculate the total connection success rate based on log messages:", + file = "conditional", + tag = "docsCaseSuccessRate" + ), + @Example( + description = "Calculate an hourly error rate as a percentage of the total number of log messages:", + file = "conditional", + tag = "docsCaseHourlyErrorRate" + ) } ) public Case( Source source, - @Param(name = "condition", type = { "boolean" }) Expression first, + @Param(name = "condition", type = { "boolean" }, description = "A condition.") Expression first, @Param( name = "trueValue", type = { @@ -79,7 +96,9 @@ public Case( "long", "text", "unsigned_long", - "version" } + "version" }, + description = "The value that's returned when the corresponding condition is the first to evaluate to `true`. " + + "The default value is returned when no condition matches." 
) List rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index 1794258402ae..b1c761a50d8b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -37,14 +38,26 @@ public class Greatest extends EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, - description = "Returns the maximum value from many columns." + returnType = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "Returns the maximum value from multiple columns. This is similar to <>\n" + + "except it is intended to run on multiple columns at once.", + note = "When run on `keyword` or `text` fields, this returns the last string in alphabetical order. 
" + + "When run on `boolean` columns this will return `true` if any values are `true`.", + examples = @Example(file = "math", tag = "greatest") ) public Greatest( Source source, - @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, - @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, optional = true) List< - Expression> rest + @Param( + name = "first", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "First of the columns to evaluate." + ) Expression first, + @Param( + name = "rest", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "The rest of the columns to evaluate.", + optional = true + ) List rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index 6b4208f7b3d8..8b68196af68a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; @@ -37,14 +38,24 @@ public class Least extends 
EsqlScalarFunction implements OptionalArgument { private DataType dataType; @FunctionInfo( - returnType = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, - description = "Returns the minimum value from many columns." + returnType = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "Returns the minimum value from multiple columns. " + + "This is similar to <> except it is intended to run on multiple columns at once.", + examples = @Example(file = "math", tag = "least") ) public Least( Source source, - @Param(name = "first", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }) Expression first, - @Param(name = "rest", type = { "integer", "long", "double", "boolean", "keyword", "text", "ip", "version" }, optional = true) List< - Expression> rest + @Param( + name = "first", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "First of the columns to evaluate." 
+ ) Expression first, + @Param( + name = "rest", + type = { "boolean", "double", "integer", "ip", "keyword", "long", "text", "version" }, + description = "The rest of the columns to evaluate.", + optional = true + ) List rest ) { super(source, Stream.concat(Stream.of(first), rest.stream()).toList()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 98dc0c7e83d9..8c39a29f67f9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -52,12 +52,12 @@ public Coalesce( @Param( name = "first", type = { "boolean", "text", "integer", "keyword", "long" }, - description = "Expression to evaluate" + description = "Expression to evaluate." 
) Expression first, @Param( name = "rest", type = { "boolean", "text", "integer", "keyword", "long" }, - description = "Other expression to evaluate", + description = "Other expression to evaluate.", optional = true ) List rest ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java index 279f31e34ac9..31e0a86a1e3e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialContains.java @@ -111,7 +111,9 @@ private boolean pointRelatesGeometries(long encoded, Component2D[] rightComponen @FunctionInfo( returnType = { "boolean" }, - description = "Returns whether the first geometry contains the second geometry.", + description = """ + Returns whether the first geometry contains the second geometry. + This is the inverse of the <> function.""", examples = @Example(file = "spatial_shapes", tag = "st_contains-airport_city_boundaries") ) public SpatialContains( @@ -119,12 +121,16 @@ public SpatialContains( @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." ) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. 
" + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java index 7833f93b6270..7b85ebfea5ee 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialDisjoint.java @@ -65,7 +65,10 @@ public class SpatialDisjoint extends SpatialRelatesFunction { @FunctionInfo( returnType = { "boolean" }, - description = "Returns whether the two geometries or geometry columns are disjoint.", + description = """ + Returns whether the two geometries or geometry columns are disjoint. + This is the inverse of the <> function. + In mathematical terms: ST_Disjoint(A, B) ⇔ A ⋂ B = ∅""", examples = @Example(file = "spatial_shapes", tag = "st_disjoint-airport_city_boundaries") ) public SpatialDisjoint( @@ -73,12 +76,16 @@ public SpatialDisjoint( @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." ) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. 
" + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java index 810e3206ada7..462f3bce1aee 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialIntersects.java @@ -63,22 +63,27 @@ public class SpatialIntersects extends SpatialRelatesFunction { new CartesianShapeIndexer("ST_Intersects") ); - @FunctionInfo( - returnType = { "boolean" }, - description = "Returns whether the two geometries or geometry columns intersect.", - examples = @Example(file = "spatial", tag = "st_intersects-airports") - ) + @FunctionInfo(returnType = { "boolean" }, description = """ + Returns true if two geometries intersect. + They intersect if they have any point in common, including their interior points + (points along lines or within polygons). + This is the inverse of the <> function. + In mathematical terms: ST_Intersects(A, B) ⇔ A ⋂ B ≠ ∅""", examples = @Example(file = "spatial", tag = "st_intersects-airports")) public SpatialIntersects( Source source, @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." 
) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java index ca285ca07e27..1eaf1e31e543 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialWithin.java @@ -66,7 +66,9 @@ public class SpatialWithin extends SpatialRelatesFunction implements SurrogateEx @FunctionInfo( returnType = { "boolean" }, - description = "Returns whether the first geometry is within the second geometry.", + description = """ + Returns whether the first geometry is within the second geometry. + This is the inverse of the <> function.""", examples = @Example(file = "spatial_shapes", tag = "st_within-airport_city_boundaries") ) public SpatialWithin( @@ -74,12 +76,16 @@ public SpatialWithin( @Param( name = "geomA", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`." 
) Expression left, @Param( name = "geomB", type = { "geo_point", "cartesian_point", "geo_shape", "cartesian_shape" }, - description = "Geometry column name or variable of geometry type" + description = "Expression of type `geo_point`, `cartesian_point`, `geo_shape` or `cartesian_shape`. " + + "If `null`, the function returns `null`.\n" + + "The second parameter must also have the same coordinate system as the first.\n" + + "This means it is not possible to combine `geo_*` and `cartesian_*` parameters." ) Expression right ) { this(source, left, right, false, false); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java index f86be9290fed..f5ff933babc9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StX.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -34,8 +35,20 @@ * Alternatively it is well described in PostGIS documentation at PostGIS:ST_X. 
*/ public class StX extends UnaryScalarFunction { - @FunctionInfo(returnType = "double", description = "Extracts the x-coordinate from a point geometry.") - public StX(Source source, @Param(name = "point", type = { "geo_point", "cartesian_point" }) Expression field) { + @FunctionInfo( + returnType = "double", + description = "Extracts the `x` coordinate from the supplied point.\n" + + "If the points is of type `geo_point` this is equivalent to extracting the `longitude` value.", + examples = @Example(file = "spatial", tag = "st_x_y") + ) + public StX( + Source source, + @Param( + name = "point", + type = { "geo_point", "cartesian_point" }, + description = "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." + ) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java index 759c23c73374..48de97da4bef 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StY.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; @@ -34,8 +35,20 @@ * Alternatively it is well described in PostGIS documentation at PostGIS:ST_Y. 
*/ public class StY extends UnaryScalarFunction { - @FunctionInfo(returnType = "double", description = "Extracts the y-coordinate from a point geometry.") - public StY(Source source, @Param(name = "point", type = { "geo_point", "cartesian_point" }) Expression field) { + @FunctionInfo( + returnType = "double", + description = "Extracts the `y` coordinate from the supplied point.\n" + + "If the points is of type `geo_point` this is equivalent to extracting the `latitude` value.", + examples = @Example(file = "spatial", tag = "st_x_y") + ) + public StY( + Source source, + @Param( + name = "point", + type = { "geo_point", "cartesian_point" }, + description = "Expression of type `geo_point` or `cartesian_point`. If `null`, the function returns `null`." + ) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index 90692d5b19df..ee23cf00a37a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import java.math.BigInteger; import java.util.List; import java.util.function.Function; import java.util.function.Supplier; @@ -32,6 +33,7 @@ import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; public class CaseTests extends AbstractFunctionTestCase { @@ -44,26 +46,173 @@ public CaseTests(@Name("TestCase") Supplier testCaseS */ @ParametersFactory public static Iterable parameters() { - return 
parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("basics", () -> { - List typedData = List.of( - new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), - new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.KEYWORD, "a"), - new TestCaseSupplier.TypedData(new BytesRef("b"), DataTypes.KEYWORD, "b") - ); - return new TestCaseSupplier.TestCase( - typedData, - "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " - + "value=Attribute[channel=1]]], elseVal=Attribute[channel=2]]", - DataTypes.KEYWORD, - equalTo(new BytesRef("a")) - ); - }))); + return parameterSuppliersFromTypedData( + List.of(new TestCaseSupplier("keyword", List.of(DataTypes.BOOLEAN, DataTypes.KEYWORD, DataTypes.KEYWORD), () -> { + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.KEYWORD, "a"), + new TestCaseSupplier.TypedData(new BytesRef("b"), DataTypes.KEYWORD, "b") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=Attribute[channel=2]]", + DataTypes.KEYWORD, + equalTo(new BytesRef("a")) + ); + }), new TestCaseSupplier("text", List.of(DataTypes.BOOLEAN, DataTypes.TEXT), () -> { + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(new BytesRef("a"), DataTypes.TEXT, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.TEXT, + nullValue() + ); + }), new TestCaseSupplier("boolean", List.of(DataTypes.BOOLEAN, DataTypes.BOOLEAN), () -> { + List typedData = List.of( + new 
TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BOOLEAN, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.BOOLEAN, + nullValue() + ); + }), new TestCaseSupplier("date", List.of(DataTypes.BOOLEAN, DataTypes.DATETIME), () -> { + long value = randomNonNegativeLong(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.DATETIME, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=LONG, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.DATETIME, + equalTo(value) + ); + }), new TestCaseSupplier("double", List.of(DataTypes.BOOLEAN, DataTypes.DOUBLE), () -> { + double value = randomDouble(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.DOUBLE, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=DOUBLE, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.DOUBLE, + equalTo(value) + ); + }), new TestCaseSupplier("integer", List.of(DataTypes.BOOLEAN, DataTypes.INTEGER), () -> { + int value = randomInt(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.INTEGER, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=INT, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + 
"value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.INTEGER, + nullValue() + ); + }), new TestCaseSupplier("long", List.of(DataTypes.BOOLEAN, DataTypes.LONG), () -> { + long value = randomLong(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.LONG, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=LONG, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.LONG, + nullValue() + ); + }), new TestCaseSupplier("unsigned_long", List.of(DataTypes.BOOLEAN, DataTypes.UNSIGNED_LONG), () -> { + BigInteger value = randomUnsignedLongBetween(BigInteger.ZERO, UNSIGNED_LONG_MAX); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.UNSIGNED_LONG, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=LONG, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.UNSIGNED_LONG, + equalTo(value) + ); + }), new TestCaseSupplier("ip", List.of(DataTypes.BOOLEAN, DataTypes.IP), () -> { + BytesRef value = (BytesRef) randomLiteral(DataTypes.IP).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.IP, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.IP, + equalTo(value) + ); + }), new TestCaseSupplier("version", List.of(DataTypes.BOOLEAN, DataTypes.VERSION), () -> { + BytesRef value = 
(BytesRef) randomLiteral(DataTypes.VERSION).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, DataTypes.VERSION, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + DataTypes.VERSION, + nullValue() + ); + }), new TestCaseSupplier("cartesian_point", List.of(DataTypes.BOOLEAN, EsqlDataTypes.CARTESIAN_POINT), () -> { + BytesRef value = (BytesRef) randomLiteral(EsqlDataTypes.CARTESIAN_POINT).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(false, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, EsqlDataTypes.CARTESIAN_POINT, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + EsqlDataTypes.CARTESIAN_POINT, + nullValue() + ); + }), new TestCaseSupplier("geo_point", List.of(DataTypes.BOOLEAN, EsqlDataTypes.GEO_POINT), () -> { + BytesRef value = (BytesRef) randomLiteral(EsqlDataTypes.GEO_POINT).value(); + List typedData = List.of( + new TestCaseSupplier.TypedData(true, DataTypes.BOOLEAN, "cond"), + new TestCaseSupplier.TypedData(value, EsqlDataTypes.GEO_POINT, "trueValue") + ); + return new TestCaseSupplier.TestCase( + typedData, + "CaseEvaluator[resultType=BYTES_REF, conditions=[ConditionEvaluator[condition=Attribute[channel=0], " + + "value=Attribute[channel=1]]], elseVal=LiteralsEvaluator[lit=null]]", + EsqlDataTypes.GEO_POINT, + equalTo(value) + ); + })) + ); } @Override protected void assertSimpleWithNulls(List data, Block value, int nullBlock) { if (nullBlock == 0) { - assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + if (data.size() 
== 2) { + assertThat(value.isNull(0), equalTo(true)); + } else if (data.size() > 2) { + assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + } return; } if (((Boolean) data.get(0)).booleanValue()) { @@ -77,7 +226,11 @@ protected void assertSimpleWithNulls(List data, Block value, int nullBlo if (nullBlock == 2) { super.assertSimpleWithNulls(data, value, nullBlock); } else { - assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + if (data.size() > 2) { + assertThat(toJavaObject(value, 0), equalTo(data.get(2))); + } else { + super.assertSimpleWithNulls(data, value, nullBlock); + } } } From cb435733a0b1655e2cb47748c02a89c86314f0e9 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 10 May 2024 14:53:56 -0400 Subject: [PATCH 058/119] ESQL: Reduce number of ignored tests (#108471) This reduces the number of skipped tests for functions, dropping the number from 130754 to 3226. This doesn't buy us much more coverage, but it doesn't really take any more time so it's probably ok. It'd be nice to have some understanding of each of the skipped tests, 130k is way too many to put in your head. The actual test change is: when you are need to build an evaluator but can't because you'll get a type error, just assert that you get a type error and let the test finish. This is nearly as fast as just bailing, and it gets us to the point where we can start reasoning about the skipped tests. 
--- .../function/AbstractFunctionTestCase.java | 61 ++++++++++++------- 1 file changed, 40 insertions(+), 21 deletions(-) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 1fd7cfe36806..4867b0c62a18 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -166,14 +166,17 @@ protected static Iterable parameterSuppliersFromTypedData(List allNullsMatcher() { } private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext context, boolean insertNulls) { + Expression expression = randomBoolean() ? buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); - assumeTrue("Must build evaluator to test sending it blocks", testCase.getExpectedTypeError() == null); - boolean readFloating = randomBoolean(); int positions = between(1, 1024); List data = testCase.getData(); Page onePositionPage = row(testCase.getDataValues()); @@ -401,7 +403,6 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con } b++; } - Expression expression = readFloating ? 
buildDeepCopyOfFieldExpression(testCase) : buildFieldExpression(testCase); try ( ExpressionEvaluator eval = evaluator(expression).get(context); Block block = eval.eval(new Page(positions, manyPositionsBlocks)) @@ -427,13 +428,15 @@ private void testEvaluateBlock(BlockFactory inputBlockFactory, DriverContext con } } - // TODO cranky time - public void testSimpleWithNulls() { // TODO replace this with nulls inserted into the test case like anyNullIsNull + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); List simpleData = testCase.getDataValues(); - try (EvalOperator.ExpressionEvaluator eval = evaluator(buildFieldExpression(testCase)).get(driverContext())) { + try (EvalOperator.ExpressionEvaluator eval = evaluator(expression).get(driverContext())) { BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); Block[] orig = BlockUtils.fromListRow(blockFactory, simpleData); for (int i = 0; i < orig.length; i++) { @@ -472,12 +475,16 @@ protected void assertSimpleWithNulls(List data, Block value, int nullBlo } public final void testEvaluateInManyThreads() throws ExecutionException, InterruptedException { + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); assumeTrue("Expected type must be representable to build an evaluator", EsqlDataTypes.isRepresentable(testCase.expectedType())); - assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); int count = 10_000; int threads = 5; - var evalSupplier = evaluator(buildFieldExpression(testCase)); + var evalSupplier = evaluator(expression); 
ExecutorService exec = Executors.newFixedThreadPool(threads); try { List> futures = new ArrayList<>(); @@ -504,17 +511,25 @@ public final void testEvaluateInManyThreads() throws ExecutionException, Interru } public final void testEvaluatorToString() { + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); - var factory = evaluator(buildFieldExpression(testCase)); + var factory = evaluator(expression); try (ExpressionEvaluator ev = factory.get(driverContext())) { assertThat(ev.toString(), testCase.evaluatorToString()); } } public final void testFactoryToString() { + Expression expression = buildFieldExpression(testCase); + if (testCase.getExpectedTypeError() != null) { + assertTypeResolutionFailure(expression); + return; + } assumeTrue("Can't build evaluator", testCase.canBuildEvaluator()); - assumeTrue("Nothing to do if a type error", testCase.getExpectedTypeError() == null); var factory = evaluator(buildFieldExpression(testCase)); assertThat(factory.toString(), testCase.evaluatorToString()); } @@ -522,8 +537,7 @@ public final void testFactoryToString() { public final void testFold() { Expression expression = buildLiteralExpression(testCase); if (testCase.getExpectedTypeError() != null) { - assertTrue(expression.typeResolved().unresolved()); - assertThat(expression.typeResolved().message(), equalTo(testCase.getExpectedTypeError())); + assertTypeResolutionFailure(expression); return; } assertFalse(expression.typeResolved().unresolved()); @@ -1115,6 +1129,11 @@ protected static DataType[] representableNonSpatialTypes() { return representableNonSpatial().toArray(DataType[]::new); } + protected final void assertTypeResolutionFailure(Expression expression) { + assertTrue("expected unresolved", 
expression.typeResolved().unresolved()); + assertThat(expression.typeResolved().message(), equalTo(testCase.getExpectedTypeError())); + } + @AfterClass public static void renderSignature() throws IOException { if (System.getProperty("generateDocs") == null) { From 7cc433507aad23fc79634ccbd68fc170f4191ae8 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 10 May 2024 15:31:08 -0400 Subject: [PATCH 059/119] ESQL: Support "capabilites" in the csv-spec tests (#108464) This flips the csv-spec construct called `required_feature:` from using "cluster features" to using "cluster capabilities". "Features" are a "heavy" concept that live in the cluster state and should be used for quickly checking things on the local node. "Capabilities" are fairly fluid list of strings that live on each node and are calculated on the fly so much nicer for testing. This adds all existing "cluster features" for esql as "cluster capabilities" for the ESQL `_query` and `_query/async` actions. The tests just check that. In a follow-up change I'll replace the syntax `required_feature:` with `required_capability:`. Our esql capabilities all starts with `esql.` - but capabilities are naturally scoped to the endpoint. So I've removed the `esql.` from the capabilities we add. 
--- .../xpack/esql/ccq/MultiClusterSpecIT.java | 22 +++++---- .../xpack/esql/qa/rest/EsqlSpecTestCase.java | 37 ++++++++++++-- .../xpack/esql/action/EsqlCapabilities.java | 48 +++++++++++++++++++ .../esql/action/RestEsqlAsyncQueryAction.java | 5 ++ .../esql/action/RestEsqlQueryAction.java | 5 ++ .../elasticsearch/xpack/esql/CsvTests.java | 5 +- .../elasticsearch/xpack/ql/CsvSpecReader.java | 10 ++-- 7 files changed, 113 insertions(+), 19 deletions(-) create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java index ca084ab26908..8f13dd53a0d2 100644 --- a/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java +++ b/x-pack/plugin/esql/qa/server/multi-clusters/src/javaRestTest/java/org/elasticsearch/xpack/esql/ccq/MultiClusterSpecIT.java @@ -67,7 +67,7 @@ public class MultiClusterSpecIT extends EsqlSpecTestCase { public static TestRule clusterRule = RuleChain.outerRule(remoteCluster).around(localCluster); private static TestFeatureService remoteFeaturesService; - private static RestClient remoteFeaturesServiceClient; + private static RestClient remoteClusterClient; @ParametersFactory(argumentFormatting = "%2$s.%3$s") public static List readScriptSpec() throws Exception { @@ -95,30 +95,34 @@ public MultiClusterSpecIT(String fileName, String groupName, String testName, In @Override protected void shouldSkipTest(String testName) throws IOException { super.shouldSkipTest(testName); - for (String feature : testCase.requiredFeatures) { - assumeTrue("Test " + testName + " requires " + feature, remoteFeaturesService().clusterHasFeature(feature)); - } + checkCapabilities(remoteClusterClient(), 
remoteFeaturesService(), testName, testCase); assumeFalse("can't test with _index metadata", hasIndexMetadata(testCase.query)); assumeTrue("Test " + testName + " is skipped on " + Clusters.oldVersion(), isEnabled(testName, Clusters.oldVersion())); } private TestFeatureService remoteFeaturesService() throws IOException { if (remoteFeaturesService == null) { - HttpHost[] remoteHosts = parseClusterHosts(remoteCluster.getHttpAddresses()).toArray(HttpHost[]::new); - remoteFeaturesServiceClient = super.buildClient(restAdminSettings(), remoteHosts); - var remoteNodeVersions = readVersionsFromNodesInfo(remoteFeaturesServiceClient); + var remoteNodeVersions = readVersionsFromNodesInfo(remoteClusterClient()); var semanticNodeVersions = remoteNodeVersions.stream() .map(ESRestTestCase::parseLegacyVersion) .flatMap(Optional::stream) .collect(Collectors.toSet()); - remoteFeaturesService = createTestFeatureService(getClusterStateFeatures(remoteFeaturesServiceClient), semanticNodeVersions); + remoteFeaturesService = createTestFeatureService(getClusterStateFeatures(remoteClusterClient()), semanticNodeVersions); } return remoteFeaturesService; } + private RestClient remoteClusterClient() throws IOException { + if (remoteClusterClient == null) { + HttpHost[] remoteHosts = parseClusterHosts(remoteCluster.getHttpAddresses()).toArray(HttpHost[]::new); + remoteClusterClient = super.buildClient(restAdminSettings(), remoteHosts); + } + return remoteClusterClient; + } + @AfterClass public static void closeRemoveFeaturesService() throws IOException { - IOUtils.close(remoteFeaturesServiceClient); + IOUtils.close(remoteClusterClient); } @Override diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java index fc65cb990f82..0b653a1d9210 100644 --- 
a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/EsqlSpecTestCase.java @@ -15,6 +15,7 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.Point; @@ -23,6 +24,7 @@ import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.TestFeatureService; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.esql.CsvTestUtils; import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.RequestObjectBuilder; @@ -150,12 +152,41 @@ public final void test() throws Throwable { } protected void shouldSkipTest(String testName) throws IOException { - for (String feature : testCase.requiredFeatures) { - assumeTrue("Test " + testName + " requires " + feature, clusterHasFeature(feature)); - } + checkCapabilities(adminClient(), testFeatureService, testName, testCase); assumeTrue("Test " + testName + " is not enabled", isEnabled(testName, Version.CURRENT)); } + protected static void checkCapabilities(RestClient client, TestFeatureService testFeatureService, String testName, CsvTestCase testCase) + throws IOException { + if (testCase.requiredCapabilities.isEmpty()) { + return; + } + try { + if (clusterHasCapability(client, "POST", "/_query", List.of(), testCase.requiredCapabilities).orElse(false)) { + return; + } + LOGGER.info("capabilities API returned false, we might be in a mixed version cluster so falling back to cluster features"); + } catch (ResponseException e) { + if (e.getResponse().getStatusLine().getStatusCode() / 100 == 4) { + /* + * The node we're 
testing against is too old for the capabilities + * API which means it has to be pretty old. Very old capabilities + * are ALSO present in the features API, so we can check them instead. + * + * It's kind of weird that we check for *any* 400, but that's required + * because old versions of Elasticsearch return 400, not the expected + * 404. + */ + LOGGER.info("capabilities API failed, falling back to cluster features"); + } else { + throw e; + } + } + for (String feature : testCase.requiredCapabilities) { + assumeTrue("Test " + testName + " requires " + feature, testFeatureService.clusterHasFeature("esql." + feature)); + } + } + protected final void doTest() throws Throwable { RequestObjectBuilder builder = new RequestObjectBuilder(randomFrom(XContentType.values())); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java new file mode 100644 index 000000000000..fa23466f54f8 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; +import org.elasticsearch.xpack.esql.plugin.EsqlFeatures; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +/** + * A {@link Set} of "capabilities" supported by the {@link RestEsqlQueryAction} + * and {@link RestEsqlAsyncQueryAction} APIs. These are exposed over the + * {@link RestNodesCapabilitiesAction} and we use them to enable tests. 
+ */ +public class EsqlCapabilities { + static final Set CAPABILITIES = capabilities(); + + private static Set capabilities() { + /* + * Add all of our cluster features without the leading "esql." + */ + List caps = new ArrayList<>(); + for (NodeFeature feature : new EsqlFeatures().getFeatures()) { + caps.add(cap(feature)); + } + for (NodeFeature feature : new EsqlFeatures().getHistoricalFeatures().keySet()) { + caps.add(cap(feature)); + } + return Set.copyOf(caps); + } + + /** + * Convert a {@link NodeFeature} from {@link EsqlFeatures} into a + * capability. + */ + public static String cap(NodeFeature feature) { + assert feature.id().startsWith("esql."); + return feature.id().substring("esql.".length()); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java index 3f0289d49535..ad47779fffbb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java @@ -39,6 +39,11 @@ public List routes() { return List.of(new Route(POST, "/_query/async")); } + @Override + public Set supportedCapabilities() { + return EsqlCapabilities.CAPABILITIES; + } + @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { EsqlQueryRequest esqlRequest; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java index 97a7f8e0e9e7..268966422ce5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java @@ -39,6 +39,11 @@ public List 
routes() { return List.of(new Route(POST, "/_query")); } + @Override + public Set supportedCapabilities() { + return EsqlCapabilities.CAPABILITIES; + } + @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { EsqlQueryRequest esqlRequest; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 86f595810a49..cb8700d5d760 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -107,6 +107,7 @@ import static org.elasticsearch.xpack.esql.CsvTestsDataLoader.CSV_DATASET_MAP; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; +import static org.elasticsearch.xpack.esql.action.EsqlCapabilities.cap; import static org.elasticsearch.xpack.ql.CsvSpecReader.specParser; import static org.elasticsearch.xpack.ql.TestUtils.classpathResources; import static org.hamcrest.Matchers.equalTo; @@ -222,8 +223,8 @@ public final void test() throws Throwable { * The csv tests support all but a few features. The unsupported features * are tested in integration tests. 
*/ - assumeFalse("metadata fields aren't supported", testCase.requiredFeatures.contains(EsqlFeatures.METADATA_FIELDS.id())); - assumeFalse("enrich can't load fields in csv tests", testCase.requiredFeatures.contains(EsqlFeatures.ENRICH_LOAD.id())); + assumeFalse("metadata fields aren't supported", testCase.requiredCapabilities.contains(cap(EsqlFeatures.METADATA_FIELDS))); + assumeFalse("enrich can't load fields in csv tests", testCase.requiredCapabilities.contains(cap(EsqlFeatures.ENRICH_LOAD))); doTest(); } catch (Throwable th) { throw reworkException(th); diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java index 7243eae34ac6..45d3653a28b6 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java @@ -31,7 +31,7 @@ public static class CsvSpecParser implements SpecReader.Parser { private final StringBuilder earlySchema = new StringBuilder(); private final StringBuilder query = new StringBuilder(); private final StringBuilder data = new StringBuilder(); - private final List requiredFeatures = new ArrayList<>(); + private final List requiredCapabilities = new ArrayList<>(); private CsvTestCase testCase; private CsvSpecParser() {} @@ -44,7 +44,7 @@ public Object parse(String line) { assertThat("Early schema already declared " + earlySchema, earlySchema.length(), is(0)); earlySchema.append(line.substring(SCHEMA_PREFIX.length()).trim()); } else if (line.toLowerCase(Locale.ROOT).startsWith("required_feature:")) { - requiredFeatures.add(line.substring("required_feature:".length()).trim()); + requiredCapabilities.add(line.substring("required_feature:".length()).trim().replace("esql.", "")); } else { if (line.endsWith(";")) { // pick up the query @@ -52,8 +52,8 @@ public Object parse(String 
line) { query.append(line.substring(0, line.length() - 1).trim()); testCase.query = query.toString(); testCase.earlySchema = earlySchema.toString(); - testCase.requiredFeatures = List.copyOf(requiredFeatures); - requiredFeatures.clear(); + testCase.requiredCapabilities = List.copyOf(requiredCapabilities); + requiredCapabilities.clear(); earlySchema.setLength(0); query.setLength(0); } @@ -111,7 +111,7 @@ public static class CsvTestCase { private final List expectedWarningsRegexString = new ArrayList<>(); private final List expectedWarningsRegex = new ArrayList<>(); public boolean ignoreOrder; - public List requiredFeatures = List.of(); + public List requiredCapabilities = List.of(); // The emulated-specific warnings must always trail the non-emulated ones, if these are present. Otherwise, the closing bracket // would need to be changed to a less common sequence (like `]#` maybe). From b9e397b31237072c5d049d977e4cc250e08c72ad Mon Sep 17 00:00:00 2001 From: Henning Andersen <33268011+henningandersen@users.noreply.github.com> Date: Fri, 10 May 2024 23:28:20 +0200 Subject: [PATCH 060/119] Add shard closing listener (#108317) After #108145, the after shard closed listener is no longer called on the cluster state applier thread. Introduce another event that is called on the applier thread. 
Relates #108145 --- .../index/CompositeIndexEventListener.java | 12 ++++++++++++ .../java/org/elasticsearch/index/IndexService.java | 1 + .../index/shard/IndexEventListener.java | 7 +++++++ 3 files changed, 20 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java b/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java index 8b087f5a302d..047c38138fda 100644 --- a/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java +++ b/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java @@ -95,6 +95,18 @@ public void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexSh } } + @Override + public void afterIndexShardClosing(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) { + for (IndexEventListener listener : listeners) { + try { + listener.afterIndexShardClosing(shardId, indexShard, indexSettings); + } catch (Exception e) { + logger.warn(() -> "[" + shardId.getId() + "] failed to invoke after shard closing callback", e); + throw e; + } + } + } + @Override public void afterIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) { for (IndexEventListener listener : listeners) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index eb0672f7ad12..88db674c3ec2 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -649,6 +649,7 @@ public void onFailure(Exception e) { onResponse(null); // otherwise ignore the exception } }, l -> indexShard.close(reason, flushEngine, closeExecutor, l)); + listener.afterIndexShardClosing(sId, indexShard, indexSettings); } } } finally { diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java 
b/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java index 5bea31d2d204..b27a27588975 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java @@ -55,6 +55,13 @@ default void afterIndexShardStarted(IndexShard indexShard) {} */ default void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) {} + /** + * Called after the index shard has been marked closed. It could still be waiting for the async close of the engine. + * The ordering between this and the subsequent state notifications (closed, deleted, store closed) is + * not guaranteed. + */ + default void afterIndexShardClosing(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) {} + /** * Called after the index shard has been closed. * From 415578d7af0177e623d8c3030841b5fb8504019d Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 10 May 2024 21:38:44 +0000 Subject: [PATCH 061/119] Bump versions after 8.13.4 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 6 +++--- .buildkite/pipelines/periodic.yml | 10 +++++----- .ci/bwcVersions | 2 +- .ci/snapshotBwcVersions | 2 +- server/src/main/java/org/elasticsearch/Version.java | 1 + .../resources/org/elasticsearch/TransportVersions.csv | 1 + .../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 14 insertions(+), 11 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index e702c97248cd..944230377d07 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.22", "8.13.4", "8.14.0", "8.15.0"] + BWC_VERSION: ["7.17.22", "8.13.5", "8.14.0", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git 
a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index d013780b1fd0..5ac361c81062 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -529,8 +529,8 @@ steps: env: BWC_VERSION: 8.12.2 - - label: "{{matrix.image}} / 8.13.4 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.4 + - label: "{{matrix.image}} / 8.13.5 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.5 timeout_in_minutes: 300 matrix: setup: @@ -543,7 +543,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.13.4 + BWC_VERSION: 8.13.5 - label: "{{matrix.image}} / 8.14.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.14.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 7e9f6872b914..7c5f683cf969 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -591,8 +591,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.13.4 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.4#bwcTest + - label: 8.13.5 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.5#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -601,7 +601,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.13.4 + BWC_VERSION: 8.13.5 retry: automatic: - exit_status: "-1" @@ -714,7 +714,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk17 - BWC_VERSION: ["7.17.22", "8.13.4", "8.14.0", "8.15.0"] + BWC_VERSION: ["7.17.22", "8.13.5", "8.14.0", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -760,7 +760,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 - BWC_VERSION: ["7.17.22", "8.13.4", "8.14.0", 
"8.15.0"] + BWC_VERSION: ["7.17.22", "8.13.5", "8.14.0", "8.15.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 77e51005d5ac..b9afdcf23b85 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -30,6 +30,6 @@ BWC_VERSION: - "8.10.4" - "8.11.4" - "8.12.2" - - "8.13.4" + - "8.13.5" - "8.14.0" - "8.15.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 49f3708ce4af..213e4e93bc81 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,5 @@ BWC_VERSION: - "7.17.22" - - "8.13.4" + - "8.13.5" - "8.14.0" - "8.15.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index a2e04d0bf3d4..dc161766b795 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -174,6 +174,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_13_2 = new Version(8_13_02_99); public static final Version V_8_13_3 = new Version(8_13_03_99); public static final Version V_8_13_4 = new Version(8_13_04_99); + public static final Version V_8_13_5 = new Version(8_13_05_99); public static final Version V_8_14_0 = new Version(8_14_00_99); public static final Version V_8_15_0 = new Version(8_15_00_99); public static final Version CURRENT = V_8_15_0; diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index e6f0da6a4545..526f327b91c1 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -119,3 +119,4 @@ 8.13.1,8595000 8.13.2,8595000 8.13.3,8595000 +8.13.4,8595001 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index 
bc6523c98761..39f2a701726a 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -119,3 +119,4 @@ 8.13.1,8503000 8.13.2,8503000 8.13.3,8503000 +8.13.4,8503000 From 06401722c76da450f55820ce613438157d6913e2 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Fri, 10 May 2024 21:41:40 +0000 Subject: [PATCH 062/119] Prune changelogs after 8.13.4 release --- docs/changelog/108276.yaml | 5 ----- docs/changelog/108280.yaml | 6 ------ docs/changelog/108283.yaml | 6 ------ 3 files changed, 17 deletions(-) delete mode 100644 docs/changelog/108276.yaml delete mode 100644 docs/changelog/108280.yaml delete mode 100644 docs/changelog/108283.yaml diff --git a/docs/changelog/108276.yaml b/docs/changelog/108276.yaml deleted file mode 100644 index aaa78073f544..000000000000 --- a/docs/changelog/108276.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 108276 -summary: Fix tsdb codec when doc-values spread in two blocks -area: TSDB -type: bug -issues: [] diff --git a/docs/changelog/108280.yaml b/docs/changelog/108280.yaml deleted file mode 100644 index b36a2f376912..000000000000 --- a/docs/changelog/108280.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 108280 -summary: Ensure necessary security context for s3 bulk deletions -area: Snapshot/Restore -type: bug -issues: - - 108049 diff --git a/docs/changelog/108283.yaml b/docs/changelog/108283.yaml deleted file mode 100644 index 6341a8775b72..000000000000 --- a/docs/changelog/108283.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 108283 -summary: Fix `BlockHash` `DirectEncoder` -area: ES|QL -type: bug -issues: - - 108268 From 6978161418c81f8a9140f93fced8a7549548aea6 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 10 May 2024 15:39:03 -0700 Subject: [PATCH 063/119] AwaitsFix: https://github.com/elastic/elasticsearch/issues/108529 --- build-tools-internal/muted-tests.yml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 
100644 build-tools-internal/muted-tests.yml diff --git a/build-tools-internal/muted-tests.yml b/build-tools-internal/muted-tests.yml new file mode 100644 index 000000000000..210215a13133 --- /dev/null +++ b/build-tools-internal/muted-tests.yml @@ -0,0 +1,26 @@ +tests: +- class: "org.elasticsearch.xpack.transform.transforms.scheduling.MonotonicClockTests" + issue: "https://github.com/elastic/elasticsearch/issues/108529" +# Examples: +# +# Mute a single test case in a YAML test suite: +# - class: org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT +# method: test {yaml=analysis-common/30_tokenizers/letter} +# issue: https://github.com/elastic/elasticsearch/... +# +# Mute several methods of a Java test: +# - class: org.elasticsearch.common.CharArraysTests +# methods: +# - testCharsBeginsWith +# - testCharsToBytes +# - testConstantTimeEquals +# issue: https://github.com/elastic/elasticsearch/... +# +# Mute an entire test class: +# - class: org.elasticsearch.common.unit.TimeValueTests +# issue: https://github.com/elastic/elasticsearch/... +# +# Mute a single method in a test class: +# - class: org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIPTests +# method: testCrankyEvaluateBlockWithoutNulls +# issue: https://github.com/elastic/elasticsearch/... 
From bbe51781f420da06757472d6ee3dac82d1ec442a Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 10 May 2024 15:44:53 -0700 Subject: [PATCH 064/119] AwaitsFix #108530 --- .../xpack/transform/transforms/pivot/GroupByOptimizerTests.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/GroupByOptimizerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/GroupByOptimizerTests.java index 039781aedb4b..a1ea87a97719 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/GroupByOptimizerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/GroupByOptimizerTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.transform.transforms.pivot; +import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.transform.TransformConfigVersion; import org.elasticsearch.xpack.core.transform.transforms.pivot.SingleGroupSource; @@ -27,6 +28,7 @@ import static org.elasticsearch.xpack.core.transform.transforms.pivot.TermsGroupSourceTests.randomTermsGroupSource; import static org.elasticsearch.xpack.core.transform.transforms.pivot.TermsGroupSourceTests.randomTermsGroupSourceNoScript; +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108530") public class GroupByOptimizerTests extends ESTestCase { public void testOneGroupBy() { From 59eeed8f6d89ae7861a372319cab056f345ae49a Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 10 May 2024 15:52:05 -0700 Subject: [PATCH 065/119] Move mutes to correct location --- build-tools-internal/muted-tests.yml | 26 -------------------------- muted-tests.yml | 2 ++ 2 files changed, 2 insertions(+), 26 deletions(-) delete mode 100644 build-tools-internal/muted-tests.yml diff --git a/build-tools-internal/muted-tests.yml 
b/build-tools-internal/muted-tests.yml deleted file mode 100644 index 210215a13133..000000000000 --- a/build-tools-internal/muted-tests.yml +++ /dev/null @@ -1,26 +0,0 @@ -tests: -- class: "org.elasticsearch.xpack.transform.transforms.scheduling.MonotonicClockTests" - issue: "https://github.com/elastic/elasticsearch/issues/108529" -# Examples: -# -# Mute a single test case in a YAML test suite: -# - class: org.elasticsearch.analysis.common.CommonAnalysisClientYamlTestSuiteIT -# method: test {yaml=analysis-common/30_tokenizers/letter} -# issue: https://github.com/elastic/elasticsearch/... -# -# Mute several methods of a Java test: -# - class: org.elasticsearch.common.CharArraysTests -# methods: -# - testCharsBeginsWith -# - testCharsToBytes -# - testConstantTimeEquals -# issue: https://github.com/elastic/elasticsearch/... -# -# Mute an entire test class: -# - class: org.elasticsearch.common.unit.TimeValueTests -# issue: https://github.com/elastic/elasticsearch/... -# -# Mute a single method in a test class: -# - class: org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToIPTests -# method: testCrankyEvaluateBlockWithoutNulls -# issue: https://github.com/elastic/elasticsearch/... 
diff --git a/muted-tests.yml b/muted-tests.yml index 341d127c7b64..210215a13133 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -1,4 +1,6 @@ tests: +- class: "org.elasticsearch.xpack.transform.transforms.scheduling.MonotonicClockTests" + issue: "https://github.com/elastic/elasticsearch/issues/108529" # Examples: # # Mute a single test case in a YAML test suite: From 64ef2f42c2f0d28502ea6b0e022e4bf40078cdfd Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Fri, 10 May 2024 17:19:48 -0700 Subject: [PATCH 066/119] AwaitsFix #108523 --- .../test/java/org/elasticsearch/packaging/test/DockerTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index b1240747b1a6..dc4e24959a5c 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -1211,6 +1211,7 @@ private List listPlugins() { /** * Check that readiness listener works */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108523") public void test500Readiness() throws Exception { assertFalse(readinessProbe(9399)); // Disabling security so we wait for green From 6028232d8895d53179e558a1b9a04b14182e581a Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 10 May 2024 22:38:07 -0700 Subject: [PATCH 067/119] Add metrics command syntax (#108115) This PR introduces a METRICS command in ESQL for timeseries indices. This PR only adds the metrics syntax and translates it into a pair of EsIndex and Aggregate logical plans. Subsequent pull requests will introduce new logical and physical plans for handling time-series aggregations. 
Some examples of the METRICS command: METRICS tsdb METRICS tsdb max(cpu) BY host METRICS pods load=avg(cpu), writes=max(rate(indexing_requests)) BY pod | SORT pod --- .../xpack/esql/action/TimeSeriesIT.java | 52 +- .../esql/src/main/antlr/EsqlBaseLexer.g4 | 77 +- .../esql/src/main/antlr/EsqlBaseLexer.tokens | 310 +-- .../esql/src/main/antlr/EsqlBaseParser.g4 | 13 +- .../esql/src/main/antlr/EsqlBaseParser.tokens | 310 +-- .../xpack/esql/parser/EsqlBaseLexer.interp | 41 +- .../xpack/esql/parser/EsqlBaseLexer.java | 1711 +++++++++-------- .../xpack/esql/parser/EsqlBaseParser.interp | 23 +- .../xpack/esql/parser/EsqlBaseParser.java | 1702 ++++++++-------- .../parser/EsqlBaseParserBaseListener.java | 16 +- .../parser/EsqlBaseParserBaseVisitor.java | 9 +- .../esql/parser/EsqlBaseParserListener.java | 18 +- .../esql/parser/EsqlBaseParserVisitor.java | 10 +- .../xpack/esql/parser/IdentifierBuilder.java | 8 +- .../xpack/esql/parser/LogicalPlanBuilder.java | 40 +- .../esql/parser/StatementParserTests.java | 143 ++ 16 files changed, 2541 insertions(+), 1942 deletions(-) diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java index 406361438fc4..f82e55462308 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeSeriesIT.java @@ -8,9 +8,17 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static 
org.hamcrest.Matchers.hasSize; public class TimeSeriesIT extends AbstractEsqlIntegTestCase { @@ -37,6 +45,48 @@ public void testEmpty() { "type=long,time_series_metric=gauge" ) .get(); - run("FROM pods | LIMIT 1").close(); + run("METRICS pods | LIMIT 1").close(); + } + + public void testSimpleMetrics() { + Settings settings = Settings.builder().put("mode", "time_series").putList("routing_path", List.of("pod")).build(); + client().admin() + .indices() + .prepareCreate("pods") + .setSettings(settings) + .setMapping( + "@timestamp", + "type=date", + "pod", + "type=keyword,time_series_dimension=true", + "cpu", + "type=double,time_series_metric=gauge" + ) + .get(); + List pods = List.of("p1", "p2", "p3"); + long startTime = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2024-04-15T00:00:00Z"); + int numDocs = between(10, 10); + Map> cpus = new HashMap<>(); + for (int i = 0; i < numDocs; i++) { + String pod = randomFrom(pods); + int cpu = randomIntBetween(0, 100); + cpus.computeIfAbsent(pod, k -> new ArrayList<>()).add(cpu); + long timestamp = startTime + (1000L * i); + client().prepareIndex("pods").setSource("@timestamp", timestamp, "pod", pod, "cpu", cpu).get(); + } + List sortedGroups = cpus.keySet().stream().sorted().toList(); + client().admin().indices().prepareRefresh("pods").get(); + try (EsqlQueryResponse resp = run("METRICS pods load=avg(cpu) BY pod | SORT pod")) { + List> rows = EsqlTestUtils.getValuesList(resp); + assertThat(rows, hasSize(sortedGroups.size())); + for (int i = 0; i < rows.size(); i++) { + List r = rows.get(i); + String pod = (String) r.get(1); + assertThat(pod, equalTo(sortedGroups.get(i))); + List values = cpus.get(pod); + double avg = values.stream().mapToDouble(n -> n).sum() / values.size(); + assertThat((double) r.get(0), equalTo(avg)); + } + } } } diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index f16afa86199f..9f005db107ae 100644 --- 
a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -11,6 +11,7 @@ INLINESTATS : 'inlinestats' -> pushMode(EXPRESSION_MODE); KEEP : 'keep' -> pushMode(PROJECT_MODE); LIMIT : 'limit' -> pushMode(EXPRESSION_MODE); META : 'meta' -> pushMode(META_MODE); +METRICS : 'metrics' -> pushMode(METRICS_MODE); MV_EXPAND : 'mv_expand' -> pushMode(MVEXPAND_MODE); RENAME : 'rename' -> pushMode(RENAME_MODE); ROW : 'row' -> pushMode(EXPRESSION_MODE); @@ -31,6 +32,16 @@ MULTILINE_COMMENT WS : [ \r\n\t]+ -> channel(HIDDEN) ; + +fragment INDEX_UNQUOTED_IDENTIFIER_PART + : ~[=`|,[\]/ \t\r\n] + | '/' ~[*/] // allow single / but not followed by another / or * which would start a comment + ; + +INDEX_UNQUOTED_IDENTIFIER + : INDEX_UNQUOTED_IDENTIFIER_PART+ + ; + // // Explain // @@ -192,13 +203,8 @@ FROM_QUOTED_STRING : QUOTED_STRING -> type(QUOTED_STRING); OPTIONS : 'options'; METADATA : 'metadata'; -fragment FROM_UNQUOTED_IDENTIFIER_PART - : ~[=`|,[\]/ \t\r\n] - | '/' ~[*/] // allow single / but not followed by another / or * which would start a comment - ; - -FROM_UNQUOTED_IDENTIFIER - : FROM_UNQUOTED_IDENTIFIER_PART+ +FROM_INDEX_UNQUOTED_IDENTIFIER + : INDEX_UNQUOTED_IDENTIFIER -> type(INDEX_UNQUOTED_IDENTIFIER) ; FROM_LINE_COMMENT @@ -424,3 +430,60 @@ SETTING_WS : WS -> channel(HIDDEN) ; + +// +// METRICS command +// +mode METRICS_MODE; +METRICS_PIPE : PIPE -> type(PIPE), popMode; + +METRICS_INDEX_UNQUOTED_IDENTIFIER + : INDEX_UNQUOTED_IDENTIFIER -> type(INDEX_UNQUOTED_IDENTIFIER), popMode, pushMode(CLOSING_METRICS_MODE) + ; + +METRICS_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +METRICS_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +METRICS_WS + : WS -> channel(HIDDEN) + ; + +// TODO: remove this workaround mode - see https://github.com/elastic/elasticsearch/issues/108528 +mode CLOSING_METRICS_MODE; + +CLOSING_METRICS_COMMA + : COMMA -> type(COMMA), popMode, pushMode(METRICS_MODE) + ; + 
+CLOSING_METRICS_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +CLOSING_METRICS_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +CLOSING_METRICS_WS + : WS -> channel(HIDDEN) + ; + +CLOSING_METRICS_QUOTED_IDENTIFIER + : QUOTED_IDENTIFIER -> popMode, pushMode(EXPRESSION_MODE), type(QUOTED_IDENTIFIER) + ; + +CLOSING_METRICS_UNQUOTED_IDENTIFIER + :UNQUOTED_IDENTIFIER -> popMode, pushMode(EXPRESSION_MODE), type(UNQUOTED_IDENTIFIER) + ; + +CLOSING_METRICS_BY + :BY -> popMode, pushMode(EXPRESSION_MODE), type(BY) + ; + +CLOSING_METRICS_PIPE + : PIPE -> type(PIPE), popMode + ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index b496aa68b61f..15a8356d1b94 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -9,105 +9,112 @@ INLINESTATS=8 KEEP=9 LIMIT=10 META=11 -MV_EXPAND=12 -RENAME=13 -ROW=14 -SHOW=15 -SORT=16 -STATS=17 -WHERE=18 -UNKNOWN_CMD=19 -LINE_COMMENT=20 -MULTILINE_COMMENT=21 -WS=22 -EXPLAIN_WS=23 -EXPLAIN_LINE_COMMENT=24 -EXPLAIN_MULTILINE_COMMENT=25 -PIPE=26 -QUOTED_STRING=27 -INTEGER_LITERAL=28 -DECIMAL_LITERAL=29 -BY=30 -AND=31 -ASC=32 -ASSIGN=33 -CAST_OP=34 -COMMA=35 -DESC=36 -DOT=37 -FALSE=38 -FIRST=39 -LAST=40 -LP=41 -IN=42 -IS=43 -LIKE=44 -NOT=45 -NULL=46 -NULLS=47 -OR=48 -PARAM=49 -RLIKE=50 -RP=51 -TRUE=52 -EQ=53 -CIEQ=54 -NEQ=55 -LT=56 -LTE=57 -GT=58 -GTE=59 -PLUS=60 -MINUS=61 -ASTERISK=62 -SLASH=63 -PERCENT=64 -OPENING_BRACKET=65 -CLOSING_BRACKET=66 -UNQUOTED_IDENTIFIER=67 -QUOTED_IDENTIFIER=68 -EXPR_LINE_COMMENT=69 -EXPR_MULTILINE_COMMENT=70 -EXPR_WS=71 -OPTIONS=72 -METADATA=73 -FROM_UNQUOTED_IDENTIFIER=74 -FROM_LINE_COMMENT=75 -FROM_MULTILINE_COMMENT=76 -FROM_WS=77 -ID_PATTERN=78 -PROJECT_LINE_COMMENT=79 -PROJECT_MULTILINE_COMMENT=80 -PROJECT_WS=81 -AS=82 -RENAME_LINE_COMMENT=83 -RENAME_MULTILINE_COMMENT=84 -RENAME_WS=85 -ON=86 -WITH=87 -ENRICH_POLICY_NAME=88 
-ENRICH_LINE_COMMENT=89 -ENRICH_MULTILINE_COMMENT=90 -ENRICH_WS=91 -ENRICH_FIELD_LINE_COMMENT=92 -ENRICH_FIELD_MULTILINE_COMMENT=93 -ENRICH_FIELD_WS=94 -MVEXPAND_LINE_COMMENT=95 -MVEXPAND_MULTILINE_COMMENT=96 -MVEXPAND_WS=97 -INFO=98 -SHOW_LINE_COMMENT=99 -SHOW_MULTILINE_COMMENT=100 -SHOW_WS=101 -FUNCTIONS=102 -META_LINE_COMMENT=103 -META_MULTILINE_COMMENT=104 -META_WS=105 -COLON=106 -SETTING=107 -SETTING_LINE_COMMENT=108 -SETTTING_MULTILINE_COMMENT=109 -SETTING_WS=110 +METRICS=12 +MV_EXPAND=13 +RENAME=14 +ROW=15 +SHOW=16 +SORT=17 +STATS=18 +WHERE=19 +UNKNOWN_CMD=20 +LINE_COMMENT=21 +MULTILINE_COMMENT=22 +WS=23 +INDEX_UNQUOTED_IDENTIFIER=24 +EXPLAIN_WS=25 +EXPLAIN_LINE_COMMENT=26 +EXPLAIN_MULTILINE_COMMENT=27 +PIPE=28 +QUOTED_STRING=29 +INTEGER_LITERAL=30 +DECIMAL_LITERAL=31 +BY=32 +AND=33 +ASC=34 +ASSIGN=35 +CAST_OP=36 +COMMA=37 +DESC=38 +DOT=39 +FALSE=40 +FIRST=41 +LAST=42 +LP=43 +IN=44 +IS=45 +LIKE=46 +NOT=47 +NULL=48 +NULLS=49 +OR=50 +PARAM=51 +RLIKE=52 +RP=53 +TRUE=54 +EQ=55 +CIEQ=56 +NEQ=57 +LT=58 +LTE=59 +GT=60 +GTE=61 +PLUS=62 +MINUS=63 +ASTERISK=64 +SLASH=65 +PERCENT=66 +OPENING_BRACKET=67 +CLOSING_BRACKET=68 +UNQUOTED_IDENTIFIER=69 +QUOTED_IDENTIFIER=70 +EXPR_LINE_COMMENT=71 +EXPR_MULTILINE_COMMENT=72 +EXPR_WS=73 +OPTIONS=74 +METADATA=75 +FROM_LINE_COMMENT=76 +FROM_MULTILINE_COMMENT=77 +FROM_WS=78 +ID_PATTERN=79 +PROJECT_LINE_COMMENT=80 +PROJECT_MULTILINE_COMMENT=81 +PROJECT_WS=82 +AS=83 +RENAME_LINE_COMMENT=84 +RENAME_MULTILINE_COMMENT=85 +RENAME_WS=86 +ON=87 +WITH=88 +ENRICH_POLICY_NAME=89 +ENRICH_LINE_COMMENT=90 +ENRICH_MULTILINE_COMMENT=91 +ENRICH_WS=92 +ENRICH_FIELD_LINE_COMMENT=93 +ENRICH_FIELD_MULTILINE_COMMENT=94 +ENRICH_FIELD_WS=95 +MVEXPAND_LINE_COMMENT=96 +MVEXPAND_MULTILINE_COMMENT=97 +MVEXPAND_WS=98 +INFO=99 +SHOW_LINE_COMMENT=100 +SHOW_MULTILINE_COMMENT=101 +SHOW_WS=102 +FUNCTIONS=103 +META_LINE_COMMENT=104 +META_MULTILINE_COMMENT=105 +META_WS=106 +COLON=107 +SETTING=108 +SETTING_LINE_COMMENT=109 +SETTTING_MULTILINE_COMMENT=110 
+SETTING_WS=111 +METRICS_LINE_COMMENT=112 +METRICS_MULTILINE_COMMENT=113 +METRICS_WS=114 +CLOSING_METRICS_LINE_COMMENT=115 +CLOSING_METRICS_MULTILINE_COMMENT=116 +CLOSING_METRICS_WS=117 'dissect'=1 'drop'=2 'enrich'=3 @@ -119,55 +126,56 @@ SETTING_WS=110 'keep'=9 'limit'=10 'meta'=11 -'mv_expand'=12 -'rename'=13 -'row'=14 -'show'=15 -'sort'=16 -'stats'=17 -'where'=18 -'|'=26 -'by'=30 -'and'=31 -'asc'=32 -'='=33 -'::'=34 -','=35 -'desc'=36 -'.'=37 -'false'=38 -'first'=39 -'last'=40 -'('=41 -'in'=42 -'is'=43 -'like'=44 -'not'=45 -'null'=46 -'nulls'=47 -'or'=48 -'?'=49 -'rlike'=50 -')'=51 -'true'=52 -'=='=53 -'=~'=54 -'!='=55 -'<'=56 -'<='=57 -'>'=58 -'>='=59 -'+'=60 -'-'=61 -'*'=62 -'/'=63 -'%'=64 -']'=66 -'options'=72 -'metadata'=73 -'as'=82 -'on'=86 -'with'=87 -'info'=98 -'functions'=102 -':'=106 +'metrics'=12 +'mv_expand'=13 +'rename'=14 +'row'=15 +'show'=16 +'sort'=17 +'stats'=18 +'where'=19 +'|'=28 +'by'=32 +'and'=33 +'asc'=34 +'='=35 +'::'=36 +','=37 +'desc'=38 +'.'=39 +'false'=40 +'first'=41 +'last'=42 +'('=43 +'in'=44 +'is'=45 +'like'=46 +'not'=47 +'null'=48 +'nulls'=49 +'or'=50 +'?'=51 +'rlike'=52 +')'=53 +'true'=54 +'=='=55 +'=~'=56 +'!='=57 +'<'=58 +'<='=59 +'>'=60 +'>='=61 +'+'=62 +'-'=63 +'*'=64 +'/'=65 +'%'=66 +']'=68 +'options'=74 +'metadata'=75 +'as'=83 +'on'=87 +'with'=88 +'info'=99 +'functions'=103 +':'=107 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index e30bc8359594..e023991b7418 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -23,6 +23,7 @@ sourceCommand : explainCommand | fromCommand | rowCommand + | metricsCommand | showCommand | metaCommand ; @@ -104,11 +105,11 @@ field ; fromCommand - : FROM fromIdentifier (COMMA fromIdentifier)* metadata? fromOptions? + : FROM indexIdentifier (COMMA indexIdentifier)* metadata? fromOptions? 
; -fromIdentifier - : FROM_UNQUOTED_IDENTIFIER +indexIdentifier + : INDEX_UNQUOTED_IDENTIFIER ; fromOptions @@ -125,13 +126,17 @@ metadata ; metadataOption - : METADATA fromIdentifier (COMMA fromIdentifier)* + : METADATA indexIdentifier (COMMA indexIdentifier)* ; deprecated_metadata : OPENING_BRACKET metadataOption CLOSING_BRACKET ; +metricsCommand + : METRICS indexIdentifier (COMMA indexIdentifier)* aggregates=fields? (BY grouping=fields)? + ; + evalCommand : EVAL fields ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index b496aa68b61f..15a8356d1b94 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -9,105 +9,112 @@ INLINESTATS=8 KEEP=9 LIMIT=10 META=11 -MV_EXPAND=12 -RENAME=13 -ROW=14 -SHOW=15 -SORT=16 -STATS=17 -WHERE=18 -UNKNOWN_CMD=19 -LINE_COMMENT=20 -MULTILINE_COMMENT=21 -WS=22 -EXPLAIN_WS=23 -EXPLAIN_LINE_COMMENT=24 -EXPLAIN_MULTILINE_COMMENT=25 -PIPE=26 -QUOTED_STRING=27 -INTEGER_LITERAL=28 -DECIMAL_LITERAL=29 -BY=30 -AND=31 -ASC=32 -ASSIGN=33 -CAST_OP=34 -COMMA=35 -DESC=36 -DOT=37 -FALSE=38 -FIRST=39 -LAST=40 -LP=41 -IN=42 -IS=43 -LIKE=44 -NOT=45 -NULL=46 -NULLS=47 -OR=48 -PARAM=49 -RLIKE=50 -RP=51 -TRUE=52 -EQ=53 -CIEQ=54 -NEQ=55 -LT=56 -LTE=57 -GT=58 -GTE=59 -PLUS=60 -MINUS=61 -ASTERISK=62 -SLASH=63 -PERCENT=64 -OPENING_BRACKET=65 -CLOSING_BRACKET=66 -UNQUOTED_IDENTIFIER=67 -QUOTED_IDENTIFIER=68 -EXPR_LINE_COMMENT=69 -EXPR_MULTILINE_COMMENT=70 -EXPR_WS=71 -OPTIONS=72 -METADATA=73 -FROM_UNQUOTED_IDENTIFIER=74 -FROM_LINE_COMMENT=75 -FROM_MULTILINE_COMMENT=76 -FROM_WS=77 -ID_PATTERN=78 -PROJECT_LINE_COMMENT=79 -PROJECT_MULTILINE_COMMENT=80 -PROJECT_WS=81 -AS=82 -RENAME_LINE_COMMENT=83 -RENAME_MULTILINE_COMMENT=84 -RENAME_WS=85 -ON=86 -WITH=87 -ENRICH_POLICY_NAME=88 -ENRICH_LINE_COMMENT=89 -ENRICH_MULTILINE_COMMENT=90 -ENRICH_WS=91 -ENRICH_FIELD_LINE_COMMENT=92 -ENRICH_FIELD_MULTILINE_COMMENT=93 
-ENRICH_FIELD_WS=94 -MVEXPAND_LINE_COMMENT=95 -MVEXPAND_MULTILINE_COMMENT=96 -MVEXPAND_WS=97 -INFO=98 -SHOW_LINE_COMMENT=99 -SHOW_MULTILINE_COMMENT=100 -SHOW_WS=101 -FUNCTIONS=102 -META_LINE_COMMENT=103 -META_MULTILINE_COMMENT=104 -META_WS=105 -COLON=106 -SETTING=107 -SETTING_LINE_COMMENT=108 -SETTTING_MULTILINE_COMMENT=109 -SETTING_WS=110 +METRICS=12 +MV_EXPAND=13 +RENAME=14 +ROW=15 +SHOW=16 +SORT=17 +STATS=18 +WHERE=19 +UNKNOWN_CMD=20 +LINE_COMMENT=21 +MULTILINE_COMMENT=22 +WS=23 +INDEX_UNQUOTED_IDENTIFIER=24 +EXPLAIN_WS=25 +EXPLAIN_LINE_COMMENT=26 +EXPLAIN_MULTILINE_COMMENT=27 +PIPE=28 +QUOTED_STRING=29 +INTEGER_LITERAL=30 +DECIMAL_LITERAL=31 +BY=32 +AND=33 +ASC=34 +ASSIGN=35 +CAST_OP=36 +COMMA=37 +DESC=38 +DOT=39 +FALSE=40 +FIRST=41 +LAST=42 +LP=43 +IN=44 +IS=45 +LIKE=46 +NOT=47 +NULL=48 +NULLS=49 +OR=50 +PARAM=51 +RLIKE=52 +RP=53 +TRUE=54 +EQ=55 +CIEQ=56 +NEQ=57 +LT=58 +LTE=59 +GT=60 +GTE=61 +PLUS=62 +MINUS=63 +ASTERISK=64 +SLASH=65 +PERCENT=66 +OPENING_BRACKET=67 +CLOSING_BRACKET=68 +UNQUOTED_IDENTIFIER=69 +QUOTED_IDENTIFIER=70 +EXPR_LINE_COMMENT=71 +EXPR_MULTILINE_COMMENT=72 +EXPR_WS=73 +OPTIONS=74 +METADATA=75 +FROM_LINE_COMMENT=76 +FROM_MULTILINE_COMMENT=77 +FROM_WS=78 +ID_PATTERN=79 +PROJECT_LINE_COMMENT=80 +PROJECT_MULTILINE_COMMENT=81 +PROJECT_WS=82 +AS=83 +RENAME_LINE_COMMENT=84 +RENAME_MULTILINE_COMMENT=85 +RENAME_WS=86 +ON=87 +WITH=88 +ENRICH_POLICY_NAME=89 +ENRICH_LINE_COMMENT=90 +ENRICH_MULTILINE_COMMENT=91 +ENRICH_WS=92 +ENRICH_FIELD_LINE_COMMENT=93 +ENRICH_FIELD_MULTILINE_COMMENT=94 +ENRICH_FIELD_WS=95 +MVEXPAND_LINE_COMMENT=96 +MVEXPAND_MULTILINE_COMMENT=97 +MVEXPAND_WS=98 +INFO=99 +SHOW_LINE_COMMENT=100 +SHOW_MULTILINE_COMMENT=101 +SHOW_WS=102 +FUNCTIONS=103 +META_LINE_COMMENT=104 +META_MULTILINE_COMMENT=105 +META_WS=106 +COLON=107 +SETTING=108 +SETTING_LINE_COMMENT=109 +SETTTING_MULTILINE_COMMENT=110 +SETTING_WS=111 +METRICS_LINE_COMMENT=112 +METRICS_MULTILINE_COMMENT=113 +METRICS_WS=114 +CLOSING_METRICS_LINE_COMMENT=115 
+CLOSING_METRICS_MULTILINE_COMMENT=116 +CLOSING_METRICS_WS=117 'dissect'=1 'drop'=2 'enrich'=3 @@ -119,55 +126,56 @@ SETTING_WS=110 'keep'=9 'limit'=10 'meta'=11 -'mv_expand'=12 -'rename'=13 -'row'=14 -'show'=15 -'sort'=16 -'stats'=17 -'where'=18 -'|'=26 -'by'=30 -'and'=31 -'asc'=32 -'='=33 -'::'=34 -','=35 -'desc'=36 -'.'=37 -'false'=38 -'first'=39 -'last'=40 -'('=41 -'in'=42 -'is'=43 -'like'=44 -'not'=45 -'null'=46 -'nulls'=47 -'or'=48 -'?'=49 -'rlike'=50 -')'=51 -'true'=52 -'=='=53 -'=~'=54 -'!='=55 -'<'=56 -'<='=57 -'>'=58 -'>='=59 -'+'=60 -'-'=61 -'*'=62 -'/'=63 -'%'=64 -']'=66 -'options'=72 -'metadata'=73 -'as'=82 -'on'=86 -'with'=87 -'info'=98 -'functions'=102 -':'=106 +'metrics'=12 +'mv_expand'=13 +'rename'=14 +'row'=15 +'show'=16 +'sort'=17 +'stats'=18 +'where'=19 +'|'=28 +'by'=32 +'and'=33 +'asc'=34 +'='=35 +'::'=36 +','=37 +'desc'=38 +'.'=39 +'false'=40 +'first'=41 +'last'=42 +'('=43 +'in'=44 +'is'=45 +'like'=46 +'not'=47 +'null'=48 +'nulls'=49 +'or'=50 +'?'=51 +'rlike'=52 +')'=53 +'true'=54 +'=='=55 +'=~'=56 +'!='=57 +'<'=58 +'<='=59 +'>'=60 +'>='=61 +'+'=62 +'-'=63 +'*'=64 +'/'=65 +'%'=66 +']'=68 +'options'=74 +'metadata'=75 +'as'=83 +'on'=87 +'with'=88 +'info'=99 +'functions'=103 +':'=107 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index d6ad79586fa7..899f745e50c3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -11,6 +11,7 @@ null 'keep' 'limit' 'meta' +'metrics' 'mv_expand' 'rename' 'row' @@ -25,6 +26,7 @@ null null null null +null '|' null null @@ -80,7 +82,6 @@ null null null null -null 'as' null null @@ -110,6 +111,12 @@ null null null null +null +null +null +null +null +null token symbolic names: null @@ -124,6 +131,7 @@ INLINESTATS KEEP LIMIT 
META +METRICS MV_EXPAND RENAME ROW @@ -135,6 +143,7 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS +INDEX_UNQUOTED_IDENTIFIER EXPLAIN_WS EXPLAIN_LINE_COMMENT EXPLAIN_MULTILINE_COMMENT @@ -186,7 +195,6 @@ EXPR_MULTILINE_COMMENT EXPR_WS OPTIONS METADATA -FROM_UNQUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS @@ -223,6 +231,12 @@ SETTING SETTING_LINE_COMMENT SETTTING_MULTILINE_COMMENT SETTING_WS +METRICS_LINE_COMMENT +METRICS_MULTILINE_COMMENT +METRICS_WS +CLOSING_METRICS_LINE_COMMENT +CLOSING_METRICS_MULTILINE_COMMENT +CLOSING_METRICS_WS rule names: DISSECT @@ -236,6 +250,7 @@ INLINESTATS KEEP LIMIT META +METRICS MV_EXPAND RENAME ROW @@ -247,6 +262,8 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS +INDEX_UNQUOTED_IDENTIFIER_PART +INDEX_UNQUOTED_IDENTIFIER EXPLAIN_OPENING_BRACKET EXPLAIN_PIPE EXPLAIN_WS @@ -317,8 +334,7 @@ FROM_ASSIGN FROM_QUOTED_STRING OPTIONS METADATA -FROM_UNQUOTED_IDENTIFIER_PART -FROM_UNQUOTED_IDENTIFIER +FROM_INDEX_UNQUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS @@ -384,6 +400,19 @@ SETTING SETTING_LINE_COMMENT SETTTING_MULTILINE_COMMENT SETTING_WS +METRICS_PIPE +METRICS_INDEX_UNQUOTED_IDENTIFIER +METRICS_LINE_COMMENT +METRICS_MULTILINE_COMMENT +METRICS_WS +CLOSING_METRICS_COMMA +CLOSING_METRICS_LINE_COMMENT +CLOSING_METRICS_MULTILINE_COMMENT +CLOSING_METRICS_WS +CLOSING_METRICS_QUOTED_IDENTIFIER +CLOSING_METRICS_UNQUOTED_IDENTIFIER +CLOSING_METRICS_BY +CLOSING_METRICS_PIPE channel names: DEFAULT_TOKEN_CHANNEL @@ -402,6 +431,8 @@ MVEXPAND_MODE SHOW_MODE META_MODE SETTING_MODE +METRICS_MODE +CLOSING_METRICS_MODE atn: -[4, 0, 110, 1197, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 
22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 
156, 2, 157, 7, 157, 2, 158, 7, 158, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 482, 8, 18, 11, 18, 12, 18, 483, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 492, 8, 19, 10, 19, 12, 19, 495, 9, 19, 1, 19, 3, 19, 498, 8, 19, 1, 19, 3, 19, 501, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 510, 8, 20, 10, 20, 12, 20, 513, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 521, 8, 21, 11, 21, 12, 21, 522, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 564, 8, 32, 1, 32, 4, 32, 567, 8, 32, 11, 32, 12, 32, 568, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 578, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 585, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 590, 8, 38, 10, 38, 12, 38, 593, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 601, 8, 38, 10, 38, 12, 38, 604, 9, 38, 1, 38, 1, 38, 
1, 38, 1, 38, 1, 38, 3, 38, 611, 8, 38, 1, 38, 3, 38, 614, 8, 38, 3, 38, 616, 8, 38, 1, 39, 4, 39, 619, 8, 39, 11, 39, 12, 39, 620, 1, 40, 4, 40, 624, 8, 40, 11, 40, 12, 40, 625, 1, 40, 1, 40, 5, 40, 630, 8, 40, 10, 40, 12, 40, 633, 9, 40, 1, 40, 1, 40, 4, 40, 637, 8, 40, 11, 40, 12, 40, 638, 1, 40, 4, 40, 642, 8, 40, 11, 40, 12, 40, 643, 1, 40, 1, 40, 5, 40, 648, 8, 40, 10, 40, 12, 40, 651, 9, 40, 3, 40, 653, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 659, 8, 40, 11, 40, 12, 40, 660, 1, 40, 1, 40, 3, 40, 665, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 5, 78, 796, 8, 78, 10, 78, 12, 78, 799, 9, 78, 1, 78, 1, 78, 3, 78, 803, 8, 78, 1, 78, 4, 78, 806, 8, 78, 11, 78, 12, 78, 807, 3, 78, 810, 8, 78, 1, 79, 1, 79, 4, 79, 814, 8, 79, 11, 79, 12, 79, 815, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 
91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 3, 92, 879, 8, 92, 1, 93, 4, 93, 882, 8, 93, 11, 93, 12, 93, 883, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 3, 100, 915, 8, 100, 1, 101, 1, 101, 3, 101, 919, 8, 101, 1, 101, 5, 101, 922, 8, 101, 10, 101, 12, 101, 925, 9, 101, 1, 101, 1, 101, 3, 101, 929, 8, 101, 1, 101, 4, 101, 932, 8, 101, 11, 101, 12, 101, 933, 3, 101, 936, 8, 101, 1, 102, 1, 102, 4, 102, 940, 8, 102, 11, 102, 12, 102, 941, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 120, 4, 120, 1017, 8, 120, 11, 120, 12, 120, 1018, 1, 120, 1, 120, 3, 120, 1023, 8, 120, 1, 120, 4, 120, 1026, 8, 120, 11, 120, 12, 120, 1027, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 
1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 4, 155, 1182, 8, 155, 11, 155, 12, 155, 1183, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 2, 511, 602, 0, 159, 12, 1, 14, 2, 16, 3, 18, 4, 20, 5, 22, 6, 24, 7, 26, 8, 28, 9, 30, 10, 32, 11, 34, 12, 36, 13, 38, 14, 40, 15, 42, 16, 44, 17, 46, 18, 48, 19, 50, 20, 52, 21, 54, 22, 56, 0, 58, 0, 60, 23, 62, 24, 64, 25, 66, 26, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 27, 90, 28, 92, 29, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 0, 172, 68, 174, 69, 176, 70, 178, 71, 180, 0, 182, 0, 184, 0, 186, 0, 188, 0, 190, 0, 192, 72, 194, 73, 196, 0, 198, 74, 200, 75, 202, 76, 204, 77, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 78, 218, 79, 220, 80, 222, 81, 224, 0, 226, 0, 228, 0, 230, 0, 232, 82, 234, 0, 236, 83, 238, 84, 240, 85, 242, 0, 244, 0, 246, 86, 248, 87, 250, 0, 252, 88, 254, 0, 256, 0, 258, 89, 260, 90, 262, 91, 264, 0, 266, 0, 268, 0, 270, 0, 272, 0, 274, 0, 276, 0, 278, 92, 280, 93, 282, 94, 284, 0, 286, 0, 288, 0, 290, 0, 292, 95, 294, 96, 296, 97, 298, 0, 300, 98, 302, 99, 304, 100, 306, 101, 308, 0, 310, 102, 312, 103, 314, 104, 316, 105, 
318, 0, 320, 106, 322, 107, 324, 108, 326, 109, 328, 110, 12, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1224, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 2, 66, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 
176, 1, 0, 0, 0, 2, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 4, 206, 1, 0, 0, 0, 4, 208, 1, 0, 0, 0, 4, 210, 1, 0, 0, 0, 4, 216, 1, 0, 0, 0, 4, 218, 1, 0, 0, 0, 4, 220, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 5, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 6, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 256, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 7, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 8, 284, 1, 0, 0, 0, 8, 286, 1, 0, 0, 0, 8, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 9, 298, 1, 0, 0, 0, 9, 300, 1, 0, 0, 0, 9, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 10, 308, 1, 0, 0, 0, 10, 310, 1, 0, 0, 0, 10, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 11, 318, 1, 0, 0, 0, 11, 320, 1, 0, 0, 0, 11, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 12, 330, 1, 0, 0, 0, 14, 340, 1, 0, 0, 0, 16, 347, 1, 0, 0, 0, 18, 356, 1, 0, 0, 0, 20, 363, 1, 0, 0, 0, 22, 373, 1, 0, 0, 0, 24, 380, 1, 0, 0, 0, 26, 387, 1, 0, 0, 0, 28, 401, 1, 0, 0, 0, 30, 408, 1, 0, 0, 0, 32, 416, 1, 0, 0, 0, 34, 423, 1, 0, 0, 0, 36, 435, 1, 0, 0, 0, 38, 444, 1, 0, 0, 0, 40, 450, 1, 0, 0, 0, 42, 457, 1, 0, 0, 0, 44, 464, 1, 0, 0, 0, 46, 472, 1, 0, 0, 0, 48, 481, 1, 0, 0, 0, 50, 487, 1, 0, 0, 0, 52, 504, 1, 0, 0, 0, 54, 520, 1, 0, 0, 0, 56, 526, 1, 0, 0, 0, 58, 531, 1, 0, 0, 0, 60, 536, 1, 0, 0, 0, 62, 
540, 1, 0, 0, 0, 64, 544, 1, 0, 0, 0, 66, 548, 1, 0, 0, 0, 68, 552, 1, 0, 0, 0, 70, 554, 1, 0, 0, 0, 72, 556, 1, 0, 0, 0, 74, 559, 1, 0, 0, 0, 76, 561, 1, 0, 0, 0, 78, 570, 1, 0, 0, 0, 80, 572, 1, 0, 0, 0, 82, 577, 1, 0, 0, 0, 84, 579, 1, 0, 0, 0, 86, 584, 1, 0, 0, 0, 88, 615, 1, 0, 0, 0, 90, 618, 1, 0, 0, 0, 92, 664, 1, 0, 0, 0, 94, 666, 1, 0, 0, 0, 96, 669, 1, 0, 0, 0, 98, 673, 1, 0, 0, 0, 100, 677, 1, 0, 0, 0, 102, 679, 1, 0, 0, 0, 104, 682, 1, 0, 0, 0, 106, 684, 1, 0, 0, 0, 108, 689, 1, 0, 0, 0, 110, 691, 1, 0, 0, 0, 112, 697, 1, 0, 0, 0, 114, 703, 1, 0, 0, 0, 116, 708, 1, 0, 0, 0, 118, 710, 1, 0, 0, 0, 120, 713, 1, 0, 0, 0, 122, 716, 1, 0, 0, 0, 124, 721, 1, 0, 0, 0, 126, 725, 1, 0, 0, 0, 128, 730, 1, 0, 0, 0, 130, 736, 1, 0, 0, 0, 132, 739, 1, 0, 0, 0, 134, 741, 1, 0, 0, 0, 136, 747, 1, 0, 0, 0, 138, 749, 1, 0, 0, 0, 140, 754, 1, 0, 0, 0, 142, 757, 1, 0, 0, 0, 144, 760, 1, 0, 0, 0, 146, 763, 1, 0, 0, 0, 148, 765, 1, 0, 0, 0, 150, 768, 1, 0, 0, 0, 152, 770, 1, 0, 0, 0, 154, 773, 1, 0, 0, 0, 156, 775, 1, 0, 0, 0, 158, 777, 1, 0, 0, 0, 160, 779, 1, 0, 0, 0, 162, 781, 1, 0, 0, 0, 164, 783, 1, 0, 0, 0, 166, 788, 1, 0, 0, 0, 168, 809, 1, 0, 0, 0, 170, 811, 1, 0, 0, 0, 172, 819, 1, 0, 0, 0, 174, 821, 1, 0, 0, 0, 176, 825, 1, 0, 0, 0, 178, 829, 1, 0, 0, 0, 180, 833, 1, 0, 0, 0, 182, 838, 1, 0, 0, 0, 184, 842, 1, 0, 0, 0, 186, 846, 1, 0, 0, 0, 188, 850, 1, 0, 0, 0, 190, 854, 1, 0, 0, 0, 192, 858, 1, 0, 0, 0, 194, 866, 1, 0, 0, 0, 196, 878, 1, 0, 0, 0, 198, 881, 1, 0, 0, 0, 200, 885, 1, 0, 0, 0, 202, 889, 1, 0, 0, 0, 204, 893, 1, 0, 0, 0, 206, 897, 1, 0, 0, 0, 208, 902, 1, 0, 0, 0, 210, 906, 1, 0, 0, 0, 212, 914, 1, 0, 0, 0, 214, 935, 1, 0, 0, 0, 216, 939, 1, 0, 0, 0, 218, 943, 1, 0, 0, 0, 220, 947, 1, 0, 0, 0, 222, 951, 1, 0, 0, 0, 224, 955, 1, 0, 0, 0, 226, 960, 1, 0, 0, 0, 228, 964, 1, 0, 0, 0, 230, 968, 1, 0, 0, 0, 232, 972, 1, 0, 0, 0, 234, 975, 1, 0, 0, 0, 236, 979, 1, 0, 0, 0, 238, 983, 1, 0, 0, 0, 240, 987, 1, 0, 0, 0, 242, 991, 1, 0, 0, 0, 244, 996, 1, 0, 0, 
0, 246, 1001, 1, 0, 0, 0, 248, 1006, 1, 0, 0, 0, 250, 1013, 1, 0, 0, 0, 252, 1022, 1, 0, 0, 0, 254, 1029, 1, 0, 0, 0, 256, 1033, 1, 0, 0, 0, 258, 1037, 1, 0, 0, 0, 260, 1041, 1, 0, 0, 0, 262, 1045, 1, 0, 0, 0, 264, 1049, 1, 0, 0, 0, 266, 1055, 1, 0, 0, 0, 268, 1059, 1, 0, 0, 0, 270, 1063, 1, 0, 0, 0, 272, 1067, 1, 0, 0, 0, 274, 1071, 1, 0, 0, 0, 276, 1075, 1, 0, 0, 0, 278, 1079, 1, 0, 0, 0, 280, 1083, 1, 0, 0, 0, 282, 1087, 1, 0, 0, 0, 284, 1091, 1, 0, 0, 0, 286, 1096, 1, 0, 0, 0, 288, 1100, 1, 0, 0, 0, 290, 1104, 1, 0, 0, 0, 292, 1108, 1, 0, 0, 0, 294, 1112, 1, 0, 0, 0, 296, 1116, 1, 0, 0, 0, 298, 1120, 1, 0, 0, 0, 300, 1125, 1, 0, 0, 0, 302, 1130, 1, 0, 0, 0, 304, 1134, 1, 0, 0, 0, 306, 1138, 1, 0, 0, 0, 308, 1142, 1, 0, 0, 0, 310, 1147, 1, 0, 0, 0, 312, 1157, 1, 0, 0, 0, 314, 1161, 1, 0, 0, 0, 316, 1165, 1, 0, 0, 0, 318, 1169, 1, 0, 0, 0, 320, 1174, 1, 0, 0, 0, 322, 1181, 1, 0, 0, 0, 324, 1185, 1, 0, 0, 0, 326, 1189, 1, 0, 0, 0, 328, 1193, 1, 0, 0, 0, 330, 331, 5, 100, 0, 0, 331, 332, 5, 105, 0, 0, 332, 333, 5, 115, 0, 0, 333, 334, 5, 115, 0, 0, 334, 335, 5, 101, 0, 0, 335, 336, 5, 99, 0, 0, 336, 337, 5, 116, 0, 0, 337, 338, 1, 0, 0, 0, 338, 339, 6, 0, 0, 0, 339, 13, 1, 0, 0, 0, 340, 341, 5, 100, 0, 0, 341, 342, 5, 114, 0, 0, 342, 343, 5, 111, 0, 0, 343, 344, 5, 112, 0, 0, 344, 345, 1, 0, 0, 0, 345, 346, 6, 1, 1, 0, 346, 15, 1, 0, 0, 0, 347, 348, 5, 101, 0, 0, 348, 349, 5, 110, 0, 0, 349, 350, 5, 114, 0, 0, 350, 351, 5, 105, 0, 0, 351, 352, 5, 99, 0, 0, 352, 353, 5, 104, 0, 0, 353, 354, 1, 0, 0, 0, 354, 355, 6, 2, 2, 0, 355, 17, 1, 0, 0, 0, 356, 357, 5, 101, 0, 0, 357, 358, 5, 118, 0, 0, 358, 359, 5, 97, 0, 0, 359, 360, 5, 108, 0, 0, 360, 361, 1, 0, 0, 0, 361, 362, 6, 3, 0, 0, 362, 19, 1, 0, 0, 0, 363, 364, 5, 101, 0, 0, 364, 365, 5, 120, 0, 0, 365, 366, 5, 112, 0, 0, 366, 367, 5, 108, 0, 0, 367, 368, 5, 97, 0, 0, 368, 369, 5, 105, 0, 0, 369, 370, 5, 110, 0, 0, 370, 371, 1, 0, 0, 0, 371, 372, 6, 4, 3, 0, 372, 21, 1, 0, 0, 0, 373, 374, 5, 102, 0, 0, 374, 375, 5, 
114, 0, 0, 375, 376, 5, 111, 0, 0, 376, 377, 5, 109, 0, 0, 377, 378, 1, 0, 0, 0, 378, 379, 6, 5, 4, 0, 379, 23, 1, 0, 0, 0, 380, 381, 5, 103, 0, 0, 381, 382, 5, 114, 0, 0, 382, 383, 5, 111, 0, 0, 383, 384, 5, 107, 0, 0, 384, 385, 1, 0, 0, 0, 385, 386, 6, 6, 0, 0, 386, 25, 1, 0, 0, 0, 387, 388, 5, 105, 0, 0, 388, 389, 5, 110, 0, 0, 389, 390, 5, 108, 0, 0, 390, 391, 5, 105, 0, 0, 391, 392, 5, 110, 0, 0, 392, 393, 5, 101, 0, 0, 393, 394, 5, 115, 0, 0, 394, 395, 5, 116, 0, 0, 395, 396, 5, 97, 0, 0, 396, 397, 5, 116, 0, 0, 397, 398, 5, 115, 0, 0, 398, 399, 1, 0, 0, 0, 399, 400, 6, 7, 0, 0, 400, 27, 1, 0, 0, 0, 401, 402, 5, 107, 0, 0, 402, 403, 5, 101, 0, 0, 403, 404, 5, 101, 0, 0, 404, 405, 5, 112, 0, 0, 405, 406, 1, 0, 0, 0, 406, 407, 6, 8, 1, 0, 407, 29, 1, 0, 0, 0, 408, 409, 5, 108, 0, 0, 409, 410, 5, 105, 0, 0, 410, 411, 5, 109, 0, 0, 411, 412, 5, 105, 0, 0, 412, 413, 5, 116, 0, 0, 413, 414, 1, 0, 0, 0, 414, 415, 6, 9, 0, 0, 415, 31, 1, 0, 0, 0, 416, 417, 5, 109, 0, 0, 417, 418, 5, 101, 0, 0, 418, 419, 5, 116, 0, 0, 419, 420, 5, 97, 0, 0, 420, 421, 1, 0, 0, 0, 421, 422, 6, 10, 5, 0, 422, 33, 1, 0, 0, 0, 423, 424, 5, 109, 0, 0, 424, 425, 5, 118, 0, 0, 425, 426, 5, 95, 0, 0, 426, 427, 5, 101, 0, 0, 427, 428, 5, 120, 0, 0, 428, 429, 5, 112, 0, 0, 429, 430, 5, 97, 0, 0, 430, 431, 5, 110, 0, 0, 431, 432, 5, 100, 0, 0, 432, 433, 1, 0, 0, 0, 433, 434, 6, 11, 6, 0, 434, 35, 1, 0, 0, 0, 435, 436, 5, 114, 0, 0, 436, 437, 5, 101, 0, 0, 437, 438, 5, 110, 0, 0, 438, 439, 5, 97, 0, 0, 439, 440, 5, 109, 0, 0, 440, 441, 5, 101, 0, 0, 441, 442, 1, 0, 0, 0, 442, 443, 6, 12, 7, 0, 443, 37, 1, 0, 0, 0, 444, 445, 5, 114, 0, 0, 445, 446, 5, 111, 0, 0, 446, 447, 5, 119, 0, 0, 447, 448, 1, 0, 0, 0, 448, 449, 6, 13, 0, 0, 449, 39, 1, 0, 0, 0, 450, 451, 5, 115, 0, 0, 451, 452, 5, 104, 0, 0, 452, 453, 5, 111, 0, 0, 453, 454, 5, 119, 0, 0, 454, 455, 1, 0, 0, 0, 455, 456, 6, 14, 8, 0, 456, 41, 1, 0, 0, 0, 457, 458, 5, 115, 0, 0, 458, 459, 5, 111, 0, 0, 459, 460, 5, 114, 0, 0, 460, 461, 5, 116, 
0, 0, 461, 462, 1, 0, 0, 0, 462, 463, 6, 15, 0, 0, 463, 43, 1, 0, 0, 0, 464, 465, 5, 115, 0, 0, 465, 466, 5, 116, 0, 0, 466, 467, 5, 97, 0, 0, 467, 468, 5, 116, 0, 0, 468, 469, 5, 115, 0, 0, 469, 470, 1, 0, 0, 0, 470, 471, 6, 16, 0, 0, 471, 45, 1, 0, 0, 0, 472, 473, 5, 119, 0, 0, 473, 474, 5, 104, 0, 0, 474, 475, 5, 101, 0, 0, 475, 476, 5, 114, 0, 0, 476, 477, 5, 101, 0, 0, 477, 478, 1, 0, 0, 0, 478, 479, 6, 17, 0, 0, 479, 47, 1, 0, 0, 0, 480, 482, 8, 0, 0, 0, 481, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 486, 6, 18, 0, 0, 486, 49, 1, 0, 0, 0, 487, 488, 5, 47, 0, 0, 488, 489, 5, 47, 0, 0, 489, 493, 1, 0, 0, 0, 490, 492, 8, 1, 0, 0, 491, 490, 1, 0, 0, 0, 492, 495, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 497, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 496, 498, 5, 13, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 500, 1, 0, 0, 0, 499, 501, 5, 10, 0, 0, 500, 499, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 6, 19, 9, 0, 503, 51, 1, 0, 0, 0, 504, 505, 5, 47, 0, 0, 505, 506, 5, 42, 0, 0, 506, 511, 1, 0, 0, 0, 507, 510, 3, 52, 20, 0, 508, 510, 9, 0, 0, 0, 509, 507, 1, 0, 0, 0, 509, 508, 1, 0, 0, 0, 510, 513, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 511, 509, 1, 0, 0, 0, 512, 514, 1, 0, 0, 0, 513, 511, 1, 0, 0, 0, 514, 515, 5, 42, 0, 0, 515, 516, 5, 47, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 6, 20, 9, 0, 518, 53, 1, 0, 0, 0, 519, 521, 7, 2, 0, 0, 520, 519, 1, 0, 0, 0, 521, 522, 1, 0, 0, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 21, 9, 0, 525, 55, 1, 0, 0, 0, 526, 527, 3, 164, 76, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 22, 10, 0, 529, 530, 6, 22, 11, 0, 530, 57, 1, 0, 0, 0, 531, 532, 3, 66, 27, 0, 532, 533, 1, 0, 0, 0, 533, 534, 6, 23, 12, 0, 534, 535, 6, 23, 13, 0, 535, 59, 1, 0, 0, 0, 536, 537, 3, 54, 21, 0, 537, 538, 1, 0, 0, 0, 538, 539, 6, 24, 9, 0, 539, 61, 1, 0, 0, 0, 540, 541, 3, 50, 19, 0, 541, 542, 1, 0, 0, 0, 542, 543, 6, 
25, 9, 0, 543, 63, 1, 0, 0, 0, 544, 545, 3, 52, 20, 0, 545, 546, 1, 0, 0, 0, 546, 547, 6, 26, 9, 0, 547, 65, 1, 0, 0, 0, 548, 549, 5, 124, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 6, 27, 13, 0, 551, 67, 1, 0, 0, 0, 552, 553, 7, 3, 0, 0, 553, 69, 1, 0, 0, 0, 554, 555, 7, 4, 0, 0, 555, 71, 1, 0, 0, 0, 556, 557, 5, 92, 0, 0, 557, 558, 7, 5, 0, 0, 558, 73, 1, 0, 0, 0, 559, 560, 8, 6, 0, 0, 560, 75, 1, 0, 0, 0, 561, 563, 7, 7, 0, 0, 562, 564, 7, 8, 0, 0, 563, 562, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 566, 1, 0, 0, 0, 565, 567, 3, 68, 28, 0, 566, 565, 1, 0, 0, 0, 567, 568, 1, 0, 0, 0, 568, 566, 1, 0, 0, 0, 568, 569, 1, 0, 0, 0, 569, 77, 1, 0, 0, 0, 570, 571, 5, 64, 0, 0, 571, 79, 1, 0, 0, 0, 572, 573, 5, 96, 0, 0, 573, 81, 1, 0, 0, 0, 574, 578, 8, 9, 0, 0, 575, 576, 5, 96, 0, 0, 576, 578, 5, 96, 0, 0, 577, 574, 1, 0, 0, 0, 577, 575, 1, 0, 0, 0, 578, 83, 1, 0, 0, 0, 579, 580, 5, 95, 0, 0, 580, 85, 1, 0, 0, 0, 581, 585, 3, 70, 29, 0, 582, 585, 3, 68, 28, 0, 583, 585, 3, 84, 36, 0, 584, 581, 1, 0, 0, 0, 584, 582, 1, 0, 0, 0, 584, 583, 1, 0, 0, 0, 585, 87, 1, 0, 0, 0, 586, 591, 5, 34, 0, 0, 587, 590, 3, 72, 30, 0, 588, 590, 3, 74, 31, 0, 589, 587, 1, 0, 0, 0, 589, 588, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 594, 1, 0, 0, 0, 593, 591, 1, 0, 0, 0, 594, 616, 5, 34, 0, 0, 595, 596, 5, 34, 0, 0, 596, 597, 5, 34, 0, 0, 597, 598, 5, 34, 0, 0, 598, 602, 1, 0, 0, 0, 599, 601, 8, 1, 0, 0, 600, 599, 1, 0, 0, 0, 601, 604, 1, 0, 0, 0, 602, 603, 1, 0, 0, 0, 602, 600, 1, 0, 0, 0, 603, 605, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 605, 606, 5, 34, 0, 0, 606, 607, 5, 34, 0, 0, 607, 608, 5, 34, 0, 0, 608, 610, 1, 0, 0, 0, 609, 611, 5, 34, 0, 0, 610, 609, 1, 0, 0, 0, 610, 611, 1, 0, 0, 0, 611, 613, 1, 0, 0, 0, 612, 614, 5, 34, 0, 0, 613, 612, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 616, 1, 0, 0, 0, 615, 586, 1, 0, 0, 0, 615, 595, 1, 0, 0, 0, 616, 89, 1, 0, 0, 0, 617, 619, 3, 68, 28, 0, 618, 617, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 620, 
621, 1, 0, 0, 0, 621, 91, 1, 0, 0, 0, 622, 624, 3, 68, 28, 0, 623, 622, 1, 0, 0, 0, 624, 625, 1, 0, 0, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 627, 1, 0, 0, 0, 627, 631, 3, 108, 48, 0, 628, 630, 3, 68, 28, 0, 629, 628, 1, 0, 0, 0, 630, 633, 1, 0, 0, 0, 631, 629, 1, 0, 0, 0, 631, 632, 1, 0, 0, 0, 632, 665, 1, 0, 0, 0, 633, 631, 1, 0, 0, 0, 634, 636, 3, 108, 48, 0, 635, 637, 3, 68, 28, 0, 636, 635, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 636, 1, 0, 0, 0, 638, 639, 1, 0, 0, 0, 639, 665, 1, 0, 0, 0, 640, 642, 3, 68, 28, 0, 641, 640, 1, 0, 0, 0, 642, 643, 1, 0, 0, 0, 643, 641, 1, 0, 0, 0, 643, 644, 1, 0, 0, 0, 644, 652, 1, 0, 0, 0, 645, 649, 3, 108, 48, 0, 646, 648, 3, 68, 28, 0, 647, 646, 1, 0, 0, 0, 648, 651, 1, 0, 0, 0, 649, 647, 1, 0, 0, 0, 649, 650, 1, 0, 0, 0, 650, 653, 1, 0, 0, 0, 651, 649, 1, 0, 0, 0, 652, 645, 1, 0, 0, 0, 652, 653, 1, 0, 0, 0, 653, 654, 1, 0, 0, 0, 654, 655, 3, 76, 32, 0, 655, 665, 1, 0, 0, 0, 656, 658, 3, 108, 48, 0, 657, 659, 3, 68, 28, 0, 658, 657, 1, 0, 0, 0, 659, 660, 1, 0, 0, 0, 660, 658, 1, 0, 0, 0, 660, 661, 1, 0, 0, 0, 661, 662, 1, 0, 0, 0, 662, 663, 3, 76, 32, 0, 663, 665, 1, 0, 0, 0, 664, 623, 1, 0, 0, 0, 664, 634, 1, 0, 0, 0, 664, 641, 1, 0, 0, 0, 664, 656, 1, 0, 0, 0, 665, 93, 1, 0, 0, 0, 666, 667, 5, 98, 0, 0, 667, 668, 5, 121, 0, 0, 668, 95, 1, 0, 0, 0, 669, 670, 5, 97, 0, 0, 670, 671, 5, 110, 0, 0, 671, 672, 5, 100, 0, 0, 672, 97, 1, 0, 0, 0, 673, 674, 5, 97, 0, 0, 674, 675, 5, 115, 0, 0, 675, 676, 5, 99, 0, 0, 676, 99, 1, 0, 0, 0, 677, 678, 5, 61, 0, 0, 678, 101, 1, 0, 0, 0, 679, 680, 5, 58, 0, 0, 680, 681, 5, 58, 0, 0, 681, 103, 1, 0, 0, 0, 682, 683, 5, 44, 0, 0, 683, 105, 1, 0, 0, 0, 684, 685, 5, 100, 0, 0, 685, 686, 5, 101, 0, 0, 686, 687, 5, 115, 0, 0, 687, 688, 5, 99, 0, 0, 688, 107, 1, 0, 0, 0, 689, 690, 5, 46, 0, 0, 690, 109, 1, 0, 0, 0, 691, 692, 5, 102, 0, 0, 692, 693, 5, 97, 0, 0, 693, 694, 5, 108, 0, 0, 694, 695, 5, 115, 0, 0, 695, 696, 5, 101, 0, 0, 696, 111, 1, 0, 0, 0, 697, 698, 5, 102, 0, 0, 698, 699, 
5, 105, 0, 0, 699, 700, 5, 114, 0, 0, 700, 701, 5, 115, 0, 0, 701, 702, 5, 116, 0, 0, 702, 113, 1, 0, 0, 0, 703, 704, 5, 108, 0, 0, 704, 705, 5, 97, 0, 0, 705, 706, 5, 115, 0, 0, 706, 707, 5, 116, 0, 0, 707, 115, 1, 0, 0, 0, 708, 709, 5, 40, 0, 0, 709, 117, 1, 0, 0, 0, 710, 711, 5, 105, 0, 0, 711, 712, 5, 110, 0, 0, 712, 119, 1, 0, 0, 0, 713, 714, 5, 105, 0, 0, 714, 715, 5, 115, 0, 0, 715, 121, 1, 0, 0, 0, 716, 717, 5, 108, 0, 0, 717, 718, 5, 105, 0, 0, 718, 719, 5, 107, 0, 0, 719, 720, 5, 101, 0, 0, 720, 123, 1, 0, 0, 0, 721, 722, 5, 110, 0, 0, 722, 723, 5, 111, 0, 0, 723, 724, 5, 116, 0, 0, 724, 125, 1, 0, 0, 0, 725, 726, 5, 110, 0, 0, 726, 727, 5, 117, 0, 0, 727, 728, 5, 108, 0, 0, 728, 729, 5, 108, 0, 0, 729, 127, 1, 0, 0, 0, 730, 731, 5, 110, 0, 0, 731, 732, 5, 117, 0, 0, 732, 733, 5, 108, 0, 0, 733, 734, 5, 108, 0, 0, 734, 735, 5, 115, 0, 0, 735, 129, 1, 0, 0, 0, 736, 737, 5, 111, 0, 0, 737, 738, 5, 114, 0, 0, 738, 131, 1, 0, 0, 0, 739, 740, 5, 63, 0, 0, 740, 133, 1, 0, 0, 0, 741, 742, 5, 114, 0, 0, 742, 743, 5, 108, 0, 0, 743, 744, 5, 105, 0, 0, 744, 745, 5, 107, 0, 0, 745, 746, 5, 101, 0, 0, 746, 135, 1, 0, 0, 0, 747, 748, 5, 41, 0, 0, 748, 137, 1, 0, 0, 0, 749, 750, 5, 116, 0, 0, 750, 751, 5, 114, 0, 0, 751, 752, 5, 117, 0, 0, 752, 753, 5, 101, 0, 0, 753, 139, 1, 0, 0, 0, 754, 755, 5, 61, 0, 0, 755, 756, 5, 61, 0, 0, 756, 141, 1, 0, 0, 0, 757, 758, 5, 61, 0, 0, 758, 759, 5, 126, 0, 0, 759, 143, 1, 0, 0, 0, 760, 761, 5, 33, 0, 0, 761, 762, 5, 61, 0, 0, 762, 145, 1, 0, 0, 0, 763, 764, 5, 60, 0, 0, 764, 147, 1, 0, 0, 0, 765, 766, 5, 60, 0, 0, 766, 767, 5, 61, 0, 0, 767, 149, 1, 0, 0, 0, 768, 769, 5, 62, 0, 0, 769, 151, 1, 0, 0, 0, 770, 771, 5, 62, 0, 0, 771, 772, 5, 61, 0, 0, 772, 153, 1, 0, 0, 0, 773, 774, 5, 43, 0, 0, 774, 155, 1, 0, 0, 0, 775, 776, 5, 45, 0, 0, 776, 157, 1, 0, 0, 0, 777, 778, 5, 42, 0, 0, 778, 159, 1, 0, 0, 0, 779, 780, 5, 47, 0, 0, 780, 161, 1, 0, 0, 0, 781, 782, 5, 37, 0, 0, 782, 163, 1, 0, 0, 0, 783, 784, 5, 91, 0, 0, 784, 785, 1, 0, 0, 
0, 785, 786, 6, 76, 0, 0, 786, 787, 6, 76, 0, 0, 787, 165, 1, 0, 0, 0, 788, 789, 5, 93, 0, 0, 789, 790, 1, 0, 0, 0, 790, 791, 6, 77, 13, 0, 791, 792, 6, 77, 13, 0, 792, 167, 1, 0, 0, 0, 793, 797, 3, 70, 29, 0, 794, 796, 3, 86, 37, 0, 795, 794, 1, 0, 0, 0, 796, 799, 1, 0, 0, 0, 797, 795, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 810, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 800, 803, 3, 84, 36, 0, 801, 803, 3, 78, 33, 0, 802, 800, 1, 0, 0, 0, 802, 801, 1, 0, 0, 0, 803, 805, 1, 0, 0, 0, 804, 806, 3, 86, 37, 0, 805, 804, 1, 0, 0, 0, 806, 807, 1, 0, 0, 0, 807, 805, 1, 0, 0, 0, 807, 808, 1, 0, 0, 0, 808, 810, 1, 0, 0, 0, 809, 793, 1, 0, 0, 0, 809, 802, 1, 0, 0, 0, 810, 169, 1, 0, 0, 0, 811, 813, 3, 80, 34, 0, 812, 814, 3, 82, 35, 0, 813, 812, 1, 0, 0, 0, 814, 815, 1, 0, 0, 0, 815, 813, 1, 0, 0, 0, 815, 816, 1, 0, 0, 0, 816, 817, 1, 0, 0, 0, 817, 818, 3, 80, 34, 0, 818, 171, 1, 0, 0, 0, 819, 820, 3, 170, 79, 0, 820, 173, 1, 0, 0, 0, 821, 822, 3, 50, 19, 0, 822, 823, 1, 0, 0, 0, 823, 824, 6, 81, 9, 0, 824, 175, 1, 0, 0, 0, 825, 826, 3, 52, 20, 0, 826, 827, 1, 0, 0, 0, 827, 828, 6, 82, 9, 0, 828, 177, 1, 0, 0, 0, 829, 830, 3, 54, 21, 0, 830, 831, 1, 0, 0, 0, 831, 832, 6, 83, 9, 0, 832, 179, 1, 0, 0, 0, 833, 834, 3, 66, 27, 0, 834, 835, 1, 0, 0, 0, 835, 836, 6, 84, 12, 0, 836, 837, 6, 84, 13, 0, 837, 181, 1, 0, 0, 0, 838, 839, 3, 164, 76, 0, 839, 840, 1, 0, 0, 0, 840, 841, 6, 85, 10, 0, 841, 183, 1, 0, 0, 0, 842, 843, 3, 166, 77, 0, 843, 844, 1, 0, 0, 0, 844, 845, 6, 86, 14, 0, 845, 185, 1, 0, 0, 0, 846, 847, 3, 104, 46, 0, 847, 848, 1, 0, 0, 0, 848, 849, 6, 87, 15, 0, 849, 187, 1, 0, 0, 0, 850, 851, 3, 100, 44, 0, 851, 852, 1, 0, 0, 0, 852, 853, 6, 88, 16, 0, 853, 189, 1, 0, 0, 0, 854, 855, 3, 88, 38, 0, 855, 856, 1, 0, 0, 0, 856, 857, 6, 89, 17, 0, 857, 191, 1, 0, 0, 0, 858, 859, 5, 111, 0, 0, 859, 860, 5, 112, 0, 0, 860, 861, 5, 116, 0, 0, 861, 862, 5, 105, 0, 0, 862, 863, 5, 111, 0, 0, 863, 864, 5, 110, 0, 0, 864, 865, 5, 115, 0, 0, 865, 193, 1, 0, 0, 0, 866, 867, 5, 109, 0, 0, 
867, 868, 5, 101, 0, 0, 868, 869, 5, 116, 0, 0, 869, 870, 5, 97, 0, 0, 870, 871, 5, 100, 0, 0, 871, 872, 5, 97, 0, 0, 872, 873, 5, 116, 0, 0, 873, 874, 5, 97, 0, 0, 874, 195, 1, 0, 0, 0, 875, 879, 8, 10, 0, 0, 876, 877, 5, 47, 0, 0, 877, 879, 8, 11, 0, 0, 878, 875, 1, 0, 0, 0, 878, 876, 1, 0, 0, 0, 879, 197, 1, 0, 0, 0, 880, 882, 3, 196, 92, 0, 881, 880, 1, 0, 0, 0, 882, 883, 1, 0, 0, 0, 883, 881, 1, 0, 0, 0, 883, 884, 1, 0, 0, 0, 884, 199, 1, 0, 0, 0, 885, 886, 3, 50, 19, 0, 886, 887, 1, 0, 0, 0, 887, 888, 6, 94, 9, 0, 888, 201, 1, 0, 0, 0, 889, 890, 3, 52, 20, 0, 890, 891, 1, 0, 0, 0, 891, 892, 6, 95, 9, 0, 892, 203, 1, 0, 0, 0, 893, 894, 3, 54, 21, 0, 894, 895, 1, 0, 0, 0, 895, 896, 6, 96, 9, 0, 896, 205, 1, 0, 0, 0, 897, 898, 3, 66, 27, 0, 898, 899, 1, 0, 0, 0, 899, 900, 6, 97, 12, 0, 900, 901, 6, 97, 13, 0, 901, 207, 1, 0, 0, 0, 902, 903, 3, 108, 48, 0, 903, 904, 1, 0, 0, 0, 904, 905, 6, 98, 18, 0, 905, 209, 1, 0, 0, 0, 906, 907, 3, 104, 46, 0, 907, 908, 1, 0, 0, 0, 908, 909, 6, 99, 15, 0, 909, 211, 1, 0, 0, 0, 910, 915, 3, 70, 29, 0, 911, 915, 3, 68, 28, 0, 912, 915, 3, 84, 36, 0, 913, 915, 3, 158, 73, 0, 914, 910, 1, 0, 0, 0, 914, 911, 1, 0, 0, 0, 914, 912, 1, 0, 0, 0, 914, 913, 1, 0, 0, 0, 915, 213, 1, 0, 0, 0, 916, 919, 3, 70, 29, 0, 917, 919, 3, 158, 73, 0, 918, 916, 1, 0, 0, 0, 918, 917, 1, 0, 0, 0, 919, 923, 1, 0, 0, 0, 920, 922, 3, 212, 100, 0, 921, 920, 1, 0, 0, 0, 922, 925, 1, 0, 0, 0, 923, 921, 1, 0, 0, 0, 923, 924, 1, 0, 0, 0, 924, 936, 1, 0, 0, 0, 925, 923, 1, 0, 0, 0, 926, 929, 3, 84, 36, 0, 927, 929, 3, 78, 33, 0, 928, 926, 1, 0, 0, 0, 928, 927, 1, 0, 0, 0, 929, 931, 1, 0, 0, 0, 930, 932, 3, 212, 100, 0, 931, 930, 1, 0, 0, 0, 932, 933, 1, 0, 0, 0, 933, 931, 1, 0, 0, 0, 933, 934, 1, 0, 0, 0, 934, 936, 1, 0, 0, 0, 935, 918, 1, 0, 0, 0, 935, 928, 1, 0, 0, 0, 936, 215, 1, 0, 0, 0, 937, 940, 3, 214, 101, 0, 938, 940, 3, 170, 79, 0, 939, 937, 1, 0, 0, 0, 939, 938, 1, 0, 0, 0, 940, 941, 1, 0, 0, 0, 941, 939, 1, 0, 0, 0, 941, 942, 1, 0, 0, 0, 942, 217, 
1, 0, 0, 0, 943, 944, 3, 50, 19, 0, 944, 945, 1, 0, 0, 0, 945, 946, 6, 103, 9, 0, 946, 219, 1, 0, 0, 0, 947, 948, 3, 52, 20, 0, 948, 949, 1, 0, 0, 0, 949, 950, 6, 104, 9, 0, 950, 221, 1, 0, 0, 0, 951, 952, 3, 54, 21, 0, 952, 953, 1, 0, 0, 0, 953, 954, 6, 105, 9, 0, 954, 223, 1, 0, 0, 0, 955, 956, 3, 66, 27, 0, 956, 957, 1, 0, 0, 0, 957, 958, 6, 106, 12, 0, 958, 959, 6, 106, 13, 0, 959, 225, 1, 0, 0, 0, 960, 961, 3, 100, 44, 0, 961, 962, 1, 0, 0, 0, 962, 963, 6, 107, 16, 0, 963, 227, 1, 0, 0, 0, 964, 965, 3, 104, 46, 0, 965, 966, 1, 0, 0, 0, 966, 967, 6, 108, 15, 0, 967, 229, 1, 0, 0, 0, 968, 969, 3, 108, 48, 0, 969, 970, 1, 0, 0, 0, 970, 971, 6, 109, 18, 0, 971, 231, 1, 0, 0, 0, 972, 973, 5, 97, 0, 0, 973, 974, 5, 115, 0, 0, 974, 233, 1, 0, 0, 0, 975, 976, 3, 216, 102, 0, 976, 977, 1, 0, 0, 0, 977, 978, 6, 111, 19, 0, 978, 235, 1, 0, 0, 0, 979, 980, 3, 50, 19, 0, 980, 981, 1, 0, 0, 0, 981, 982, 6, 112, 9, 0, 982, 237, 1, 0, 0, 0, 983, 984, 3, 52, 20, 0, 984, 985, 1, 0, 0, 0, 985, 986, 6, 113, 9, 0, 986, 239, 1, 0, 0, 0, 987, 988, 3, 54, 21, 0, 988, 989, 1, 0, 0, 0, 989, 990, 6, 114, 9, 0, 990, 241, 1, 0, 0, 0, 991, 992, 3, 66, 27, 0, 992, 993, 1, 0, 0, 0, 993, 994, 6, 115, 12, 0, 994, 995, 6, 115, 13, 0, 995, 243, 1, 0, 0, 0, 996, 997, 3, 164, 76, 0, 997, 998, 1, 0, 0, 0, 998, 999, 6, 116, 10, 0, 999, 1000, 6, 116, 20, 0, 1000, 245, 1, 0, 0, 0, 1001, 1002, 5, 111, 0, 0, 1002, 1003, 5, 110, 0, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 117, 21, 0, 1005, 247, 1, 0, 0, 0, 1006, 1007, 5, 119, 0, 0, 1007, 1008, 5, 105, 0, 0, 1008, 1009, 5, 116, 0, 0, 1009, 1010, 5, 104, 0, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1012, 6, 118, 21, 0, 1012, 249, 1, 0, 0, 0, 1013, 1014, 8, 12, 0, 0, 1014, 251, 1, 0, 0, 0, 1015, 1017, 3, 250, 119, 0, 1016, 1015, 1, 0, 0, 0, 1017, 1018, 1, 0, 0, 0, 1018, 1016, 1, 0, 0, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 3, 320, 154, 0, 1021, 1023, 1, 0, 0, 0, 1022, 1016, 1, 0, 0, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1025, 1, 0, 0, 0, 1024, 
1026, 3, 250, 119, 0, 1025, 1024, 1, 0, 0, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1025, 1, 0, 0, 0, 1027, 1028, 1, 0, 0, 0, 1028, 253, 1, 0, 0, 0, 1029, 1030, 3, 172, 80, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 121, 22, 0, 1032, 255, 1, 0, 0, 0, 1033, 1034, 3, 252, 120, 0, 1034, 1035, 1, 0, 0, 0, 1035, 1036, 6, 122, 23, 0, 1036, 257, 1, 0, 0, 0, 1037, 1038, 3, 50, 19, 0, 1038, 1039, 1, 0, 0, 0, 1039, 1040, 6, 123, 9, 0, 1040, 259, 1, 0, 0, 0, 1041, 1042, 3, 52, 20, 0, 1042, 1043, 1, 0, 0, 0, 1043, 1044, 6, 124, 9, 0, 1044, 261, 1, 0, 0, 0, 1045, 1046, 3, 54, 21, 0, 1046, 1047, 1, 0, 0, 0, 1047, 1048, 6, 125, 9, 0, 1048, 263, 1, 0, 0, 0, 1049, 1050, 3, 66, 27, 0, 1050, 1051, 1, 0, 0, 0, 1051, 1052, 6, 126, 12, 0, 1052, 1053, 6, 126, 13, 0, 1053, 1054, 6, 126, 13, 0, 1054, 265, 1, 0, 0, 0, 1055, 1056, 3, 100, 44, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 127, 16, 0, 1058, 267, 1, 0, 0, 0, 1059, 1060, 3, 104, 46, 0, 1060, 1061, 1, 0, 0, 0, 1061, 1062, 6, 128, 15, 0, 1062, 269, 1, 0, 0, 0, 1063, 1064, 3, 108, 48, 0, 1064, 1065, 1, 0, 0, 0, 1065, 1066, 6, 129, 18, 0, 1066, 271, 1, 0, 0, 0, 1067, 1068, 3, 248, 118, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1070, 6, 130, 24, 0, 1070, 273, 1, 0, 0, 0, 1071, 1072, 3, 216, 102, 0, 1072, 1073, 1, 0, 0, 0, 1073, 1074, 6, 131, 19, 0, 1074, 275, 1, 0, 0, 0, 1075, 1076, 3, 172, 80, 0, 1076, 1077, 1, 0, 0, 0, 1077, 1078, 6, 132, 22, 0, 1078, 277, 1, 0, 0, 0, 1079, 1080, 3, 50, 19, 0, 1080, 1081, 1, 0, 0, 0, 1081, 1082, 6, 133, 9, 0, 1082, 279, 1, 0, 0, 0, 1083, 1084, 3, 52, 20, 0, 1084, 1085, 1, 0, 0, 0, 1085, 1086, 6, 134, 9, 0, 1086, 281, 1, 0, 0, 0, 1087, 1088, 3, 54, 21, 0, 1088, 1089, 1, 0, 0, 0, 1089, 1090, 6, 135, 9, 0, 1090, 283, 1, 0, 0, 0, 1091, 1092, 3, 66, 27, 0, 1092, 1093, 1, 0, 0, 0, 1093, 1094, 6, 136, 12, 0, 1094, 1095, 6, 136, 13, 0, 1095, 285, 1, 0, 0, 0, 1096, 1097, 3, 108, 48, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 137, 18, 0, 1099, 287, 1, 0, 0, 0, 1100, 1101, 3, 172, 80, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 138, 
22, 0, 1103, 289, 1, 0, 0, 0, 1104, 1105, 3, 168, 78, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1107, 6, 139, 25, 0, 1107, 291, 1, 0, 0, 0, 1108, 1109, 3, 50, 19, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 140, 9, 0, 1111, 293, 1, 0, 0, 0, 1112, 1113, 3, 52, 20, 0, 1113, 1114, 1, 0, 0, 0, 1114, 1115, 6, 141, 9, 0, 1115, 295, 1, 0, 0, 0, 1116, 1117, 3, 54, 21, 0, 1117, 1118, 1, 0, 0, 0, 1118, 1119, 6, 142, 9, 0, 1119, 297, 1, 0, 0, 0, 1120, 1121, 3, 66, 27, 0, 1121, 1122, 1, 0, 0, 0, 1122, 1123, 6, 143, 12, 0, 1123, 1124, 6, 143, 13, 0, 1124, 299, 1, 0, 0, 0, 1125, 1126, 5, 105, 0, 0, 1126, 1127, 5, 110, 0, 0, 1127, 1128, 5, 102, 0, 0, 1128, 1129, 5, 111, 0, 0, 1129, 301, 1, 0, 0, 0, 1130, 1131, 3, 50, 19, 0, 1131, 1132, 1, 0, 0, 0, 1132, 1133, 6, 145, 9, 0, 1133, 303, 1, 0, 0, 0, 1134, 1135, 3, 52, 20, 0, 1135, 1136, 1, 0, 0, 0, 1136, 1137, 6, 146, 9, 0, 1137, 305, 1, 0, 0, 0, 1138, 1139, 3, 54, 21, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1141, 6, 147, 9, 0, 1141, 307, 1, 0, 0, 0, 1142, 1143, 3, 66, 27, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 148, 12, 0, 1145, 1146, 6, 148, 13, 0, 1146, 309, 1, 0, 0, 0, 1147, 1148, 5, 102, 0, 0, 1148, 1149, 5, 117, 0, 0, 1149, 1150, 5, 110, 0, 0, 1150, 1151, 5, 99, 0, 0, 1151, 1152, 5, 116, 0, 0, 1152, 1153, 5, 105, 0, 0, 1153, 1154, 5, 111, 0, 0, 1154, 1155, 5, 110, 0, 0, 1155, 1156, 5, 115, 0, 0, 1156, 311, 1, 0, 0, 0, 1157, 1158, 3, 50, 19, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1160, 6, 150, 9, 0, 1160, 313, 1, 0, 0, 0, 1161, 1162, 3, 52, 20, 0, 1162, 1163, 1, 0, 0, 0, 1163, 1164, 6, 151, 9, 0, 1164, 315, 1, 0, 0, 0, 1165, 1166, 3, 54, 21, 0, 1166, 1167, 1, 0, 0, 0, 1167, 1168, 6, 152, 9, 0, 1168, 317, 1, 0, 0, 0, 1169, 1170, 3, 166, 77, 0, 1170, 1171, 1, 0, 0, 0, 1171, 1172, 6, 153, 14, 0, 1172, 1173, 6, 153, 13, 0, 1173, 319, 1, 0, 0, 0, 1174, 1175, 5, 58, 0, 0, 1175, 321, 1, 0, 0, 0, 1176, 1182, 3, 78, 33, 0, 1177, 1182, 3, 68, 28, 0, 1178, 1182, 3, 108, 48, 0, 1179, 1182, 3, 70, 29, 0, 1180, 1182, 3, 84, 36, 0, 1181, 1176, 1, 0, 0, 0, 1181, 
1177, 1, 0, 0, 0, 1181, 1178, 1, 0, 0, 0, 1181, 1179, 1, 0, 0, 0, 1181, 1180, 1, 0, 0, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1181, 1, 0, 0, 0, 1183, 1184, 1, 0, 0, 0, 1184, 323, 1, 0, 0, 0, 1185, 1186, 3, 50, 19, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 156, 9, 0, 1188, 325, 1, 0, 0, 0, 1189, 1190, 3, 52, 20, 0, 1190, 1191, 1, 0, 0, 0, 1191, 1192, 6, 157, 9, 0, 1192, 327, 1, 0, 0, 0, 1193, 1194, 3, 54, 21, 0, 1194, 1195, 1, 0, 0, 0, 1195, 1196, 6, 158, 9, 0, 1196, 329, 1, 0, 0, 0, 58, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 483, 493, 497, 500, 509, 511, 522, 563, 568, 577, 584, 589, 591, 602, 610, 613, 615, 620, 625, 631, 638, 643, 649, 652, 660, 664, 797, 802, 807, 809, 815, 878, 883, 914, 918, 923, 928, 933, 935, 939, 941, 1018, 1022, 1027, 1181, 1183, 26, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 10, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 65, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 66, 0, 7, 35, 0, 7, 33, 0, 7, 27, 0, 7, 37, 0, 7, 78, 0, 5, 11, 0, 5, 7, 0, 7, 68, 0, 7, 88, 0, 7, 87, 0, 7, 67, 0] \ No newline at end of file +[4, 0, 117, 1307, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 
61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 
5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 4, 19, 524, 8, 19, 11, 19, 12, 19, 525, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 534, 8, 20, 10, 20, 12, 20, 537, 9, 20, 1, 20, 3, 20, 540, 8, 20, 1, 20, 3, 20, 543, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 552, 8, 21, 10, 21, 12, 21, 555, 9, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 4, 22, 563, 8, 22, 11, 22, 12, 22, 564, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 3, 23, 572, 8, 23, 1, 24, 4, 24, 575, 8, 24, 11, 24, 12, 24, 576, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 3, 35, 616, 8, 35, 1, 35, 4, 35, 619, 8, 35, 11, 35, 12, 35, 620, 1, 36, 1, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 630, 8, 38, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 3, 40, 637, 8, 40, 1, 41, 1, 41, 1, 41, 5, 41, 642, 8, 41, 10, 41, 12, 41, 645, 9, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 5, 41, 653, 8, 41, 10, 41, 12, 41, 656, 9, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 3, 41, 663, 8, 41, 1, 41, 3, 41, 666, 8, 41, 3, 41, 668, 8, 41, 1, 42, 4, 42, 671, 8, 42, 11, 42, 12, 42, 672, 1, 43, 4, 43, 676, 8, 43, 
11, 43, 12, 43, 677, 1, 43, 1, 43, 5, 43, 682, 8, 43, 10, 43, 12, 43, 685, 9, 43, 1, 43, 1, 43, 4, 43, 689, 8, 43, 11, 43, 12, 43, 690, 1, 43, 4, 43, 694, 8, 43, 11, 43, 12, 43, 695, 1, 43, 1, 43, 5, 43, 700, 8, 43, 10, 43, 12, 43, 703, 9, 43, 3, 43, 705, 8, 43, 1, 43, 1, 43, 1, 43, 1, 43, 4, 43, 711, 8, 43, 11, 43, 12, 43, 712, 1, 43, 1, 43, 3, 43, 717, 8, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 5, 81, 848, 8, 81, 10, 81, 12, 81, 851, 9, 81, 1, 81, 1, 81, 3, 81, 855, 8, 81, 1, 81, 4, 81, 858, 8, 81, 11, 81, 12, 81, 859, 3, 81, 862, 8, 81, 1, 82, 1, 82, 4, 82, 866, 8, 82, 11, 82, 12, 82, 867, 1, 82, 1, 82, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 
1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 3, 102, 961, 8, 102, 1, 103, 1, 103, 3, 103, 965, 8, 103, 1, 103, 5, 103, 968, 8, 103, 10, 103, 12, 103, 971, 9, 103, 1, 103, 1, 103, 3, 103, 975, 8, 103, 1, 103, 4, 103, 978, 8, 103, 11, 103, 12, 103, 979, 3, 103, 982, 8, 103, 1, 104, 1, 104, 4, 104, 986, 8, 104, 11, 104, 12, 104, 987, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 122, 4, 122, 1063, 8, 122, 11, 122, 12, 122, 1064, 1, 122, 1, 122, 3, 122, 1069, 8, 122, 1, 122, 4, 122, 1072, 8, 122, 11, 122, 12, 122, 1073, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 
1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 157, 4, 157, 1228, 8, 157, 11, 157, 12, 157, 1229, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 2, 553, 654, 0, 174, 14, 1, 16, 2, 18, 3, 20, 4, 22, 5, 24, 6, 26, 7, 28, 8, 30, 9, 32, 10, 34, 11, 36, 12, 38, 13, 40, 14, 42, 15, 44, 16, 46, 17, 48, 18, 50, 19, 52, 20, 54, 21, 56, 22, 58, 23, 60, 0, 62, 24, 64, 0, 66, 0, 68, 25, 70, 26, 72, 27, 74, 28, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 0, 96, 29, 98, 30, 100, 31, 102, 32, 104, 33, 106, 34, 108, 35, 110, 36, 112, 37, 114, 38, 116, 39, 118, 40, 120, 41, 122, 42, 124, 43, 126, 44, 128, 45, 130, 46, 132, 47, 134, 48, 136, 49, 138, 50, 140, 51, 142, 52, 144, 53, 146, 54, 148, 55, 150, 56, 152, 57, 154, 58, 156, 59, 158, 60, 160, 61, 162, 62, 164, 63, 166, 64, 168, 65, 170, 66, 172, 67, 174, 68, 176, 69, 178, 0, 180, 70, 182, 71, 184, 72, 186, 73, 188, 0, 190, 0, 192, 0, 194, 0, 196, 0, 198, 0, 200, 74, 202, 75, 204, 0, 206, 76, 208, 77, 210, 78, 212, 0, 214, 0, 216, 0, 218, 0, 220, 0, 222, 79, 224, 80, 226, 81, 228, 82, 230, 0, 232, 0, 234, 0, 236, 0, 238, 83, 240, 0, 242, 84, 244, 85, 246, 86, 248, 0, 
250, 0, 252, 87, 254, 88, 256, 0, 258, 89, 260, 0, 262, 0, 264, 90, 266, 91, 268, 92, 270, 0, 272, 0, 274, 0, 276, 0, 278, 0, 280, 0, 282, 0, 284, 93, 286, 94, 288, 95, 290, 0, 292, 0, 294, 0, 296, 0, 298, 96, 300, 97, 302, 98, 304, 0, 306, 99, 308, 100, 310, 101, 312, 102, 314, 0, 316, 103, 318, 104, 320, 105, 322, 106, 324, 0, 326, 107, 328, 108, 330, 109, 332, 110, 334, 111, 336, 0, 338, 0, 340, 112, 342, 113, 344, 114, 346, 0, 348, 115, 350, 116, 352, 117, 354, 0, 356, 0, 358, 0, 360, 0, 14, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1332, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 1, 64, 1, 0, 0, 0, 1, 66, 1, 0, 0, 0, 1, 68, 1, 0, 0, 0, 1, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 2, 74, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 
0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 170, 1, 0, 0, 0, 2, 172, 1, 0, 0, 0, 2, 174, 1, 0, 0, 0, 2, 176, 1, 0, 0, 0, 2, 180, 1, 0, 0, 0, 2, 182, 1, 0, 0, 0, 2, 184, 1, 0, 0, 0, 2, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 3, 196, 1, 0, 0, 0, 3, 198, 1, 0, 0, 0, 3, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 4, 212, 1, 0, 0, 0, 4, 214, 1, 0, 0, 0, 4, 216, 1, 0, 0, 0, 4, 222, 1, 0, 0, 0, 4, 224, 1, 0, 0, 0, 4, 226, 1, 0, 0, 0, 4, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 5, 234, 1, 0, 0, 0, 5, 236, 1, 0, 0, 0, 5, 238, 1, 0, 0, 0, 5, 240, 1, 0, 0, 0, 5, 242, 1, 0, 0, 0, 5, 244, 1, 0, 0, 0, 5, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 6, 250, 1, 0, 0, 0, 6, 252, 1, 0, 0, 0, 6, 254, 1, 0, 0, 0, 6, 258, 1, 0, 0, 0, 6, 260, 1, 0, 0, 0, 6, 262, 1, 0, 0, 0, 6, 264, 1, 0, 0, 0, 6, 266, 1, 0, 0, 0, 6, 268, 1, 0, 0, 0, 7, 270, 1, 0, 0, 0, 7, 272, 1, 0, 0, 0, 7, 274, 1, 0, 0, 0, 7, 276, 1, 0, 0, 0, 7, 278, 1, 0, 0, 0, 7, 280, 1, 0, 0, 0, 7, 282, 1, 0, 0, 0, 7, 284, 1, 0, 0, 0, 7, 286, 1, 0, 0, 0, 7, 288, 1, 0, 0, 0, 8, 290, 1, 0, 0, 0, 8, 292, 1, 0, 0, 0, 8, 294, 1, 0, 0, 0, 8, 296, 1, 0, 0, 0, 8, 298, 1, 0, 0, 0, 8, 300, 1, 0, 0, 0, 8, 302, 1, 0, 0, 0, 9, 304, 1, 0, 0, 0, 9, 306, 1, 0, 0, 0, 9, 308, 1, 0, 0, 0, 9, 310, 1, 0, 0, 0, 9, 312, 1, 0, 0, 0, 10, 314, 1, 0, 0, 0, 10, 316, 1, 0, 0, 0, 10, 318, 1, 0, 0, 0, 10, 320, 1, 0, 0, 0, 10, 322, 1, 0, 0, 0, 11, 324, 1, 0, 0, 0, 11, 326, 1, 0, 0, 0, 11, 328, 1, 0, 0, 0, 11, 330, 1, 0, 0, 0, 11, 332, 1, 0, 0, 0, 11, 334, 1, 0, 0, 0, 12, 336, 1, 0, 0, 0, 12, 338, 1, 0, 0, 0, 12, 
340, 1, 0, 0, 0, 12, 342, 1, 0, 0, 0, 12, 344, 1, 0, 0, 0, 13, 346, 1, 0, 0, 0, 13, 348, 1, 0, 0, 0, 13, 350, 1, 0, 0, 0, 13, 352, 1, 0, 0, 0, 13, 354, 1, 0, 0, 0, 13, 356, 1, 0, 0, 0, 13, 358, 1, 0, 0, 0, 13, 360, 1, 0, 0, 0, 14, 362, 1, 0, 0, 0, 16, 372, 1, 0, 0, 0, 18, 379, 1, 0, 0, 0, 20, 388, 1, 0, 0, 0, 22, 395, 1, 0, 0, 0, 24, 405, 1, 0, 0, 0, 26, 412, 1, 0, 0, 0, 28, 419, 1, 0, 0, 0, 30, 433, 1, 0, 0, 0, 32, 440, 1, 0, 0, 0, 34, 448, 1, 0, 0, 0, 36, 455, 1, 0, 0, 0, 38, 465, 1, 0, 0, 0, 40, 477, 1, 0, 0, 0, 42, 486, 1, 0, 0, 0, 44, 492, 1, 0, 0, 0, 46, 499, 1, 0, 0, 0, 48, 506, 1, 0, 0, 0, 50, 514, 1, 0, 0, 0, 52, 523, 1, 0, 0, 0, 54, 529, 1, 0, 0, 0, 56, 546, 1, 0, 0, 0, 58, 562, 1, 0, 0, 0, 60, 571, 1, 0, 0, 0, 62, 574, 1, 0, 0, 0, 64, 578, 1, 0, 0, 0, 66, 583, 1, 0, 0, 0, 68, 588, 1, 0, 0, 0, 70, 592, 1, 0, 0, 0, 72, 596, 1, 0, 0, 0, 74, 600, 1, 0, 0, 0, 76, 604, 1, 0, 0, 0, 78, 606, 1, 0, 0, 0, 80, 608, 1, 0, 0, 0, 82, 611, 1, 0, 0, 0, 84, 613, 1, 0, 0, 0, 86, 622, 1, 0, 0, 0, 88, 624, 1, 0, 0, 0, 90, 629, 1, 0, 0, 0, 92, 631, 1, 0, 0, 0, 94, 636, 1, 0, 0, 0, 96, 667, 1, 0, 0, 0, 98, 670, 1, 0, 0, 0, 100, 716, 1, 0, 0, 0, 102, 718, 1, 0, 0, 0, 104, 721, 1, 0, 0, 0, 106, 725, 1, 0, 0, 0, 108, 729, 1, 0, 0, 0, 110, 731, 1, 0, 0, 0, 112, 734, 1, 0, 0, 0, 114, 736, 1, 0, 0, 0, 116, 741, 1, 0, 0, 0, 118, 743, 1, 0, 0, 0, 120, 749, 1, 0, 0, 0, 122, 755, 1, 0, 0, 0, 124, 760, 1, 0, 0, 0, 126, 762, 1, 0, 0, 0, 128, 765, 1, 0, 0, 0, 130, 768, 1, 0, 0, 0, 132, 773, 1, 0, 0, 0, 134, 777, 1, 0, 0, 0, 136, 782, 1, 0, 0, 0, 138, 788, 1, 0, 0, 0, 140, 791, 1, 0, 0, 0, 142, 793, 1, 0, 0, 0, 144, 799, 1, 0, 0, 0, 146, 801, 1, 0, 0, 0, 148, 806, 1, 0, 0, 0, 150, 809, 1, 0, 0, 0, 152, 812, 1, 0, 0, 0, 154, 815, 1, 0, 0, 0, 156, 817, 1, 0, 0, 0, 158, 820, 1, 0, 0, 0, 160, 822, 1, 0, 0, 0, 162, 825, 1, 0, 0, 0, 164, 827, 1, 0, 0, 0, 166, 829, 1, 0, 0, 0, 168, 831, 1, 0, 0, 0, 170, 833, 1, 0, 0, 0, 172, 835, 1, 0, 0, 0, 174, 840, 1, 0, 0, 0, 176, 861, 1, 0, 0, 0, 178, 863, 
1, 0, 0, 0, 180, 871, 1, 0, 0, 0, 182, 873, 1, 0, 0, 0, 184, 877, 1, 0, 0, 0, 186, 881, 1, 0, 0, 0, 188, 885, 1, 0, 0, 0, 190, 890, 1, 0, 0, 0, 192, 894, 1, 0, 0, 0, 194, 898, 1, 0, 0, 0, 196, 902, 1, 0, 0, 0, 198, 906, 1, 0, 0, 0, 200, 910, 1, 0, 0, 0, 202, 918, 1, 0, 0, 0, 204, 927, 1, 0, 0, 0, 206, 931, 1, 0, 0, 0, 208, 935, 1, 0, 0, 0, 210, 939, 1, 0, 0, 0, 212, 943, 1, 0, 0, 0, 214, 948, 1, 0, 0, 0, 216, 952, 1, 0, 0, 0, 218, 960, 1, 0, 0, 0, 220, 981, 1, 0, 0, 0, 222, 985, 1, 0, 0, 0, 224, 989, 1, 0, 0, 0, 226, 993, 1, 0, 0, 0, 228, 997, 1, 0, 0, 0, 230, 1001, 1, 0, 0, 0, 232, 1006, 1, 0, 0, 0, 234, 1010, 1, 0, 0, 0, 236, 1014, 1, 0, 0, 0, 238, 1018, 1, 0, 0, 0, 240, 1021, 1, 0, 0, 0, 242, 1025, 1, 0, 0, 0, 244, 1029, 1, 0, 0, 0, 246, 1033, 1, 0, 0, 0, 248, 1037, 1, 0, 0, 0, 250, 1042, 1, 0, 0, 0, 252, 1047, 1, 0, 0, 0, 254, 1052, 1, 0, 0, 0, 256, 1059, 1, 0, 0, 0, 258, 1068, 1, 0, 0, 0, 260, 1075, 1, 0, 0, 0, 262, 1079, 1, 0, 0, 0, 264, 1083, 1, 0, 0, 0, 266, 1087, 1, 0, 0, 0, 268, 1091, 1, 0, 0, 0, 270, 1095, 1, 0, 0, 0, 272, 1101, 1, 0, 0, 0, 274, 1105, 1, 0, 0, 0, 276, 1109, 1, 0, 0, 0, 278, 1113, 1, 0, 0, 0, 280, 1117, 1, 0, 0, 0, 282, 1121, 1, 0, 0, 0, 284, 1125, 1, 0, 0, 0, 286, 1129, 1, 0, 0, 0, 288, 1133, 1, 0, 0, 0, 290, 1137, 1, 0, 0, 0, 292, 1142, 1, 0, 0, 0, 294, 1146, 1, 0, 0, 0, 296, 1150, 1, 0, 0, 0, 298, 1154, 1, 0, 0, 0, 300, 1158, 1, 0, 0, 0, 302, 1162, 1, 0, 0, 0, 304, 1166, 1, 0, 0, 0, 306, 1171, 1, 0, 0, 0, 308, 1176, 1, 0, 0, 0, 310, 1180, 1, 0, 0, 0, 312, 1184, 1, 0, 0, 0, 314, 1188, 1, 0, 0, 0, 316, 1193, 1, 0, 0, 0, 318, 1203, 1, 0, 0, 0, 320, 1207, 1, 0, 0, 0, 322, 1211, 1, 0, 0, 0, 324, 1215, 1, 0, 0, 0, 326, 1220, 1, 0, 0, 0, 328, 1227, 1, 0, 0, 0, 330, 1231, 1, 0, 0, 0, 332, 1235, 1, 0, 0, 0, 334, 1239, 1, 0, 0, 0, 336, 1243, 1, 0, 0, 0, 338, 1248, 1, 0, 0, 0, 340, 1254, 1, 0, 0, 0, 342, 1258, 1, 0, 0, 0, 344, 1262, 1, 0, 0, 0, 346, 1266, 1, 0, 0, 0, 348, 1272, 1, 0, 0, 0, 350, 1276, 1, 0, 0, 0, 352, 1280, 1, 0, 0, 0, 354, 1284, 
1, 0, 0, 0, 356, 1290, 1, 0, 0, 0, 358, 1296, 1, 0, 0, 0, 360, 1302, 1, 0, 0, 0, 362, 363, 5, 100, 0, 0, 363, 364, 5, 105, 0, 0, 364, 365, 5, 115, 0, 0, 365, 366, 5, 115, 0, 0, 366, 367, 5, 101, 0, 0, 367, 368, 5, 99, 0, 0, 368, 369, 5, 116, 0, 0, 369, 370, 1, 0, 0, 0, 370, 371, 6, 0, 0, 0, 371, 15, 1, 0, 0, 0, 372, 373, 5, 100, 0, 0, 373, 374, 5, 114, 0, 0, 374, 375, 5, 111, 0, 0, 375, 376, 5, 112, 0, 0, 376, 377, 1, 0, 0, 0, 377, 378, 6, 1, 1, 0, 378, 17, 1, 0, 0, 0, 379, 380, 5, 101, 0, 0, 380, 381, 5, 110, 0, 0, 381, 382, 5, 114, 0, 0, 382, 383, 5, 105, 0, 0, 383, 384, 5, 99, 0, 0, 384, 385, 5, 104, 0, 0, 385, 386, 1, 0, 0, 0, 386, 387, 6, 2, 2, 0, 387, 19, 1, 0, 0, 0, 388, 389, 5, 101, 0, 0, 389, 390, 5, 118, 0, 0, 390, 391, 5, 97, 0, 0, 391, 392, 5, 108, 0, 0, 392, 393, 1, 0, 0, 0, 393, 394, 6, 3, 0, 0, 394, 21, 1, 0, 0, 0, 395, 396, 5, 101, 0, 0, 396, 397, 5, 120, 0, 0, 397, 398, 5, 112, 0, 0, 398, 399, 5, 108, 0, 0, 399, 400, 5, 97, 0, 0, 400, 401, 5, 105, 0, 0, 401, 402, 5, 110, 0, 0, 402, 403, 1, 0, 0, 0, 403, 404, 6, 4, 3, 0, 404, 23, 1, 0, 0, 0, 405, 406, 5, 102, 0, 0, 406, 407, 5, 114, 0, 0, 407, 408, 5, 111, 0, 0, 408, 409, 5, 109, 0, 0, 409, 410, 1, 0, 0, 0, 410, 411, 6, 5, 4, 0, 411, 25, 1, 0, 0, 0, 412, 413, 5, 103, 0, 0, 413, 414, 5, 114, 0, 0, 414, 415, 5, 111, 0, 0, 415, 416, 5, 107, 0, 0, 416, 417, 1, 0, 0, 0, 417, 418, 6, 6, 0, 0, 418, 27, 1, 0, 0, 0, 419, 420, 5, 105, 0, 0, 420, 421, 5, 110, 0, 0, 421, 422, 5, 108, 0, 0, 422, 423, 5, 105, 0, 0, 423, 424, 5, 110, 0, 0, 424, 425, 5, 101, 0, 0, 425, 426, 5, 115, 0, 0, 426, 427, 5, 116, 0, 0, 427, 428, 5, 97, 0, 0, 428, 429, 5, 116, 0, 0, 429, 430, 5, 115, 0, 0, 430, 431, 1, 0, 0, 0, 431, 432, 6, 7, 0, 0, 432, 29, 1, 0, 0, 0, 433, 434, 5, 107, 0, 0, 434, 435, 5, 101, 0, 0, 435, 436, 5, 101, 0, 0, 436, 437, 5, 112, 0, 0, 437, 438, 1, 0, 0, 0, 438, 439, 6, 8, 1, 0, 439, 31, 1, 0, 0, 0, 440, 441, 5, 108, 0, 0, 441, 442, 5, 105, 0, 0, 442, 443, 5, 109, 0, 0, 443, 444, 5, 105, 0, 0, 444, 445, 5, 116, 
0, 0, 445, 446, 1, 0, 0, 0, 446, 447, 6, 9, 0, 0, 447, 33, 1, 0, 0, 0, 448, 449, 5, 109, 0, 0, 449, 450, 5, 101, 0, 0, 450, 451, 5, 116, 0, 0, 451, 452, 5, 97, 0, 0, 452, 453, 1, 0, 0, 0, 453, 454, 6, 10, 5, 0, 454, 35, 1, 0, 0, 0, 455, 456, 5, 109, 0, 0, 456, 457, 5, 101, 0, 0, 457, 458, 5, 116, 0, 0, 458, 459, 5, 114, 0, 0, 459, 460, 5, 105, 0, 0, 460, 461, 5, 99, 0, 0, 461, 462, 5, 115, 0, 0, 462, 463, 1, 0, 0, 0, 463, 464, 6, 11, 6, 0, 464, 37, 1, 0, 0, 0, 465, 466, 5, 109, 0, 0, 466, 467, 5, 118, 0, 0, 467, 468, 5, 95, 0, 0, 468, 469, 5, 101, 0, 0, 469, 470, 5, 120, 0, 0, 470, 471, 5, 112, 0, 0, 471, 472, 5, 97, 0, 0, 472, 473, 5, 110, 0, 0, 473, 474, 5, 100, 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 6, 12, 7, 0, 476, 39, 1, 0, 0, 0, 477, 478, 5, 114, 0, 0, 478, 479, 5, 101, 0, 0, 479, 480, 5, 110, 0, 0, 480, 481, 5, 97, 0, 0, 481, 482, 5, 109, 0, 0, 482, 483, 5, 101, 0, 0, 483, 484, 1, 0, 0, 0, 484, 485, 6, 13, 8, 0, 485, 41, 1, 0, 0, 0, 486, 487, 5, 114, 0, 0, 487, 488, 5, 111, 0, 0, 488, 489, 5, 119, 0, 0, 489, 490, 1, 0, 0, 0, 490, 491, 6, 14, 0, 0, 491, 43, 1, 0, 0, 0, 492, 493, 5, 115, 0, 0, 493, 494, 5, 104, 0, 0, 494, 495, 5, 111, 0, 0, 495, 496, 5, 119, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 15, 9, 0, 498, 45, 1, 0, 0, 0, 499, 500, 5, 115, 0, 0, 500, 501, 5, 111, 0, 0, 501, 502, 5, 114, 0, 0, 502, 503, 5, 116, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 16, 0, 0, 505, 47, 1, 0, 0, 0, 506, 507, 5, 115, 0, 0, 507, 508, 5, 116, 0, 0, 508, 509, 5, 97, 0, 0, 509, 510, 5, 116, 0, 0, 510, 511, 5, 115, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 6, 17, 0, 0, 513, 49, 1, 0, 0, 0, 514, 515, 5, 119, 0, 0, 515, 516, 5, 104, 0, 0, 516, 517, 5, 101, 0, 0, 517, 518, 5, 114, 0, 0, 518, 519, 5, 101, 0, 0, 519, 520, 1, 0, 0, 0, 520, 521, 6, 18, 0, 0, 521, 51, 1, 0, 0, 0, 522, 524, 8, 0, 0, 0, 523, 522, 1, 0, 0, 0, 524, 525, 1, 0, 0, 0, 525, 523, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 528, 6, 19, 0, 0, 528, 53, 1, 0, 0, 0, 529, 530, 5, 47, 0, 0, 530, 531, 5, 
47, 0, 0, 531, 535, 1, 0, 0, 0, 532, 534, 8, 1, 0, 0, 533, 532, 1, 0, 0, 0, 534, 537, 1, 0, 0, 0, 535, 533, 1, 0, 0, 0, 535, 536, 1, 0, 0, 0, 536, 539, 1, 0, 0, 0, 537, 535, 1, 0, 0, 0, 538, 540, 5, 13, 0, 0, 539, 538, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 542, 1, 0, 0, 0, 541, 543, 5, 10, 0, 0, 542, 541, 1, 0, 0, 0, 542, 543, 1, 0, 0, 0, 543, 544, 1, 0, 0, 0, 544, 545, 6, 20, 10, 0, 545, 55, 1, 0, 0, 0, 546, 547, 5, 47, 0, 0, 547, 548, 5, 42, 0, 0, 548, 553, 1, 0, 0, 0, 549, 552, 3, 56, 21, 0, 550, 552, 9, 0, 0, 0, 551, 549, 1, 0, 0, 0, 551, 550, 1, 0, 0, 0, 552, 555, 1, 0, 0, 0, 553, 554, 1, 0, 0, 0, 553, 551, 1, 0, 0, 0, 554, 556, 1, 0, 0, 0, 555, 553, 1, 0, 0, 0, 556, 557, 5, 42, 0, 0, 557, 558, 5, 47, 0, 0, 558, 559, 1, 0, 0, 0, 559, 560, 6, 21, 10, 0, 560, 57, 1, 0, 0, 0, 561, 563, 7, 2, 0, 0, 562, 561, 1, 0, 0, 0, 563, 564, 1, 0, 0, 0, 564, 562, 1, 0, 0, 0, 564, 565, 1, 0, 0, 0, 565, 566, 1, 0, 0, 0, 566, 567, 6, 22, 10, 0, 567, 59, 1, 0, 0, 0, 568, 572, 8, 3, 0, 0, 569, 570, 5, 47, 0, 0, 570, 572, 8, 4, 0, 0, 571, 568, 1, 0, 0, 0, 571, 569, 1, 0, 0, 0, 572, 61, 1, 0, 0, 0, 573, 575, 3, 60, 23, 0, 574, 573, 1, 0, 0, 0, 575, 576, 1, 0, 0, 0, 576, 574, 1, 0, 0, 0, 576, 577, 1, 0, 0, 0, 577, 63, 1, 0, 0, 0, 578, 579, 3, 172, 79, 0, 579, 580, 1, 0, 0, 0, 580, 581, 6, 25, 11, 0, 581, 582, 6, 25, 12, 0, 582, 65, 1, 0, 0, 0, 583, 584, 3, 74, 30, 0, 584, 585, 1, 0, 0, 0, 585, 586, 6, 26, 13, 0, 586, 587, 6, 26, 14, 0, 587, 67, 1, 0, 0, 0, 588, 589, 3, 58, 22, 0, 589, 590, 1, 0, 0, 0, 590, 591, 6, 27, 10, 0, 591, 69, 1, 0, 0, 0, 592, 593, 3, 54, 20, 0, 593, 594, 1, 0, 0, 0, 594, 595, 6, 28, 10, 0, 595, 71, 1, 0, 0, 0, 596, 597, 3, 56, 21, 0, 597, 598, 1, 0, 0, 0, 598, 599, 6, 29, 10, 0, 599, 73, 1, 0, 0, 0, 600, 601, 5, 124, 0, 0, 601, 602, 1, 0, 0, 0, 602, 603, 6, 30, 14, 0, 603, 75, 1, 0, 0, 0, 604, 605, 7, 5, 0, 0, 605, 77, 1, 0, 0, 0, 606, 607, 7, 6, 0, 0, 607, 79, 1, 0, 0, 0, 608, 609, 5, 92, 0, 0, 609, 610, 7, 7, 0, 0, 610, 81, 1, 0, 0, 0, 611, 612, 8, 8, 0, 
0, 612, 83, 1, 0, 0, 0, 613, 615, 7, 9, 0, 0, 614, 616, 7, 10, 0, 0, 615, 614, 1, 0, 0, 0, 615, 616, 1, 0, 0, 0, 616, 618, 1, 0, 0, 0, 617, 619, 3, 76, 31, 0, 618, 617, 1, 0, 0, 0, 619, 620, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 620, 621, 1, 0, 0, 0, 621, 85, 1, 0, 0, 0, 622, 623, 5, 64, 0, 0, 623, 87, 1, 0, 0, 0, 624, 625, 5, 96, 0, 0, 625, 89, 1, 0, 0, 0, 626, 630, 8, 11, 0, 0, 627, 628, 5, 96, 0, 0, 628, 630, 5, 96, 0, 0, 629, 626, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 630, 91, 1, 0, 0, 0, 631, 632, 5, 95, 0, 0, 632, 93, 1, 0, 0, 0, 633, 637, 3, 78, 32, 0, 634, 637, 3, 76, 31, 0, 635, 637, 3, 92, 39, 0, 636, 633, 1, 0, 0, 0, 636, 634, 1, 0, 0, 0, 636, 635, 1, 0, 0, 0, 637, 95, 1, 0, 0, 0, 638, 643, 5, 34, 0, 0, 639, 642, 3, 80, 33, 0, 640, 642, 3, 82, 34, 0, 641, 639, 1, 0, 0, 0, 641, 640, 1, 0, 0, 0, 642, 645, 1, 0, 0, 0, 643, 641, 1, 0, 0, 0, 643, 644, 1, 0, 0, 0, 644, 646, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 646, 668, 5, 34, 0, 0, 647, 648, 5, 34, 0, 0, 648, 649, 5, 34, 0, 0, 649, 650, 5, 34, 0, 0, 650, 654, 1, 0, 0, 0, 651, 653, 8, 1, 0, 0, 652, 651, 1, 0, 0, 0, 653, 656, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 654, 652, 1, 0, 0, 0, 655, 657, 1, 0, 0, 0, 656, 654, 1, 0, 0, 0, 657, 658, 5, 34, 0, 0, 658, 659, 5, 34, 0, 0, 659, 660, 5, 34, 0, 0, 660, 662, 1, 0, 0, 0, 661, 663, 5, 34, 0, 0, 662, 661, 1, 0, 0, 0, 662, 663, 1, 0, 0, 0, 663, 665, 1, 0, 0, 0, 664, 666, 5, 34, 0, 0, 665, 664, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 668, 1, 0, 0, 0, 667, 638, 1, 0, 0, 0, 667, 647, 1, 0, 0, 0, 668, 97, 1, 0, 0, 0, 669, 671, 3, 76, 31, 0, 670, 669, 1, 0, 0, 0, 671, 672, 1, 0, 0, 0, 672, 670, 1, 0, 0, 0, 672, 673, 1, 0, 0, 0, 673, 99, 1, 0, 0, 0, 674, 676, 3, 76, 31, 0, 675, 674, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 677, 675, 1, 0, 0, 0, 677, 678, 1, 0, 0, 0, 678, 679, 1, 0, 0, 0, 679, 683, 3, 116, 51, 0, 680, 682, 3, 76, 31, 0, 681, 680, 1, 0, 0, 0, 682, 685, 1, 0, 0, 0, 683, 681, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 684, 717, 1, 0, 0, 0, 685, 683, 1, 0, 0, 0, 686, 688, 3, 116, 51, 0, 687, 
689, 3, 76, 31, 0, 688, 687, 1, 0, 0, 0, 689, 690, 1, 0, 0, 0, 690, 688, 1, 0, 0, 0, 690, 691, 1, 0, 0, 0, 691, 717, 1, 0, 0, 0, 692, 694, 3, 76, 31, 0, 693, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 693, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 704, 1, 0, 0, 0, 697, 701, 3, 116, 51, 0, 698, 700, 3, 76, 31, 0, 699, 698, 1, 0, 0, 0, 700, 703, 1, 0, 0, 0, 701, 699, 1, 0, 0, 0, 701, 702, 1, 0, 0, 0, 702, 705, 1, 0, 0, 0, 703, 701, 1, 0, 0, 0, 704, 697, 1, 0, 0, 0, 704, 705, 1, 0, 0, 0, 705, 706, 1, 0, 0, 0, 706, 707, 3, 84, 35, 0, 707, 717, 1, 0, 0, 0, 708, 710, 3, 116, 51, 0, 709, 711, 3, 76, 31, 0, 710, 709, 1, 0, 0, 0, 711, 712, 1, 0, 0, 0, 712, 710, 1, 0, 0, 0, 712, 713, 1, 0, 0, 0, 713, 714, 1, 0, 0, 0, 714, 715, 3, 84, 35, 0, 715, 717, 1, 0, 0, 0, 716, 675, 1, 0, 0, 0, 716, 686, 1, 0, 0, 0, 716, 693, 1, 0, 0, 0, 716, 708, 1, 0, 0, 0, 717, 101, 1, 0, 0, 0, 718, 719, 5, 98, 0, 0, 719, 720, 5, 121, 0, 0, 720, 103, 1, 0, 0, 0, 721, 722, 5, 97, 0, 0, 722, 723, 5, 110, 0, 0, 723, 724, 5, 100, 0, 0, 724, 105, 1, 0, 0, 0, 725, 726, 5, 97, 0, 0, 726, 727, 5, 115, 0, 0, 727, 728, 5, 99, 0, 0, 728, 107, 1, 0, 0, 0, 729, 730, 5, 61, 0, 0, 730, 109, 1, 0, 0, 0, 731, 732, 5, 58, 0, 0, 732, 733, 5, 58, 0, 0, 733, 111, 1, 0, 0, 0, 734, 735, 5, 44, 0, 0, 735, 113, 1, 0, 0, 0, 736, 737, 5, 100, 0, 0, 737, 738, 5, 101, 0, 0, 738, 739, 5, 115, 0, 0, 739, 740, 5, 99, 0, 0, 740, 115, 1, 0, 0, 0, 741, 742, 5, 46, 0, 0, 742, 117, 1, 0, 0, 0, 743, 744, 5, 102, 0, 0, 744, 745, 5, 97, 0, 0, 745, 746, 5, 108, 0, 0, 746, 747, 5, 115, 0, 0, 747, 748, 5, 101, 0, 0, 748, 119, 1, 0, 0, 0, 749, 750, 5, 102, 0, 0, 750, 751, 5, 105, 0, 0, 751, 752, 5, 114, 0, 0, 752, 753, 5, 115, 0, 0, 753, 754, 5, 116, 0, 0, 754, 121, 1, 0, 0, 0, 755, 756, 5, 108, 0, 0, 756, 757, 5, 97, 0, 0, 757, 758, 5, 115, 0, 0, 758, 759, 5, 116, 0, 0, 759, 123, 1, 0, 0, 0, 760, 761, 5, 40, 0, 0, 761, 125, 1, 0, 0, 0, 762, 763, 5, 105, 0, 0, 763, 764, 5, 110, 0, 0, 764, 127, 1, 0, 0, 0, 765, 766, 5, 105, 0, 0, 766, 767, 5, 
115, 0, 0, 767, 129, 1, 0, 0, 0, 768, 769, 5, 108, 0, 0, 769, 770, 5, 105, 0, 0, 770, 771, 5, 107, 0, 0, 771, 772, 5, 101, 0, 0, 772, 131, 1, 0, 0, 0, 773, 774, 5, 110, 0, 0, 774, 775, 5, 111, 0, 0, 775, 776, 5, 116, 0, 0, 776, 133, 1, 0, 0, 0, 777, 778, 5, 110, 0, 0, 778, 779, 5, 117, 0, 0, 779, 780, 5, 108, 0, 0, 780, 781, 5, 108, 0, 0, 781, 135, 1, 0, 0, 0, 782, 783, 5, 110, 0, 0, 783, 784, 5, 117, 0, 0, 784, 785, 5, 108, 0, 0, 785, 786, 5, 108, 0, 0, 786, 787, 5, 115, 0, 0, 787, 137, 1, 0, 0, 0, 788, 789, 5, 111, 0, 0, 789, 790, 5, 114, 0, 0, 790, 139, 1, 0, 0, 0, 791, 792, 5, 63, 0, 0, 792, 141, 1, 0, 0, 0, 793, 794, 5, 114, 0, 0, 794, 795, 5, 108, 0, 0, 795, 796, 5, 105, 0, 0, 796, 797, 5, 107, 0, 0, 797, 798, 5, 101, 0, 0, 798, 143, 1, 0, 0, 0, 799, 800, 5, 41, 0, 0, 800, 145, 1, 0, 0, 0, 801, 802, 5, 116, 0, 0, 802, 803, 5, 114, 0, 0, 803, 804, 5, 117, 0, 0, 804, 805, 5, 101, 0, 0, 805, 147, 1, 0, 0, 0, 806, 807, 5, 61, 0, 0, 807, 808, 5, 61, 0, 0, 808, 149, 1, 0, 0, 0, 809, 810, 5, 61, 0, 0, 810, 811, 5, 126, 0, 0, 811, 151, 1, 0, 0, 0, 812, 813, 5, 33, 0, 0, 813, 814, 5, 61, 0, 0, 814, 153, 1, 0, 0, 0, 815, 816, 5, 60, 0, 0, 816, 155, 1, 0, 0, 0, 817, 818, 5, 60, 0, 0, 818, 819, 5, 61, 0, 0, 819, 157, 1, 0, 0, 0, 820, 821, 5, 62, 0, 0, 821, 159, 1, 0, 0, 0, 822, 823, 5, 62, 0, 0, 823, 824, 5, 61, 0, 0, 824, 161, 1, 0, 0, 0, 825, 826, 5, 43, 0, 0, 826, 163, 1, 0, 0, 0, 827, 828, 5, 45, 0, 0, 828, 165, 1, 0, 0, 0, 829, 830, 5, 42, 0, 0, 830, 167, 1, 0, 0, 0, 831, 832, 5, 47, 0, 0, 832, 169, 1, 0, 0, 0, 833, 834, 5, 37, 0, 0, 834, 171, 1, 0, 0, 0, 835, 836, 5, 91, 0, 0, 836, 837, 1, 0, 0, 0, 837, 838, 6, 79, 0, 0, 838, 839, 6, 79, 0, 0, 839, 173, 1, 0, 0, 0, 840, 841, 5, 93, 0, 0, 841, 842, 1, 0, 0, 0, 842, 843, 6, 80, 14, 0, 843, 844, 6, 80, 14, 0, 844, 175, 1, 0, 0, 0, 845, 849, 3, 78, 32, 0, 846, 848, 3, 94, 40, 0, 847, 846, 1, 0, 0, 0, 848, 851, 1, 0, 0, 0, 849, 847, 1, 0, 0, 0, 849, 850, 1, 0, 0, 0, 850, 862, 1, 0, 0, 0, 851, 849, 1, 0, 0, 0, 852, 855, 
3, 92, 39, 0, 853, 855, 3, 86, 36, 0, 854, 852, 1, 0, 0, 0, 854, 853, 1, 0, 0, 0, 855, 857, 1, 0, 0, 0, 856, 858, 3, 94, 40, 0, 857, 856, 1, 0, 0, 0, 858, 859, 1, 0, 0, 0, 859, 857, 1, 0, 0, 0, 859, 860, 1, 0, 0, 0, 860, 862, 1, 0, 0, 0, 861, 845, 1, 0, 0, 0, 861, 854, 1, 0, 0, 0, 862, 177, 1, 0, 0, 0, 863, 865, 3, 88, 37, 0, 864, 866, 3, 90, 38, 0, 865, 864, 1, 0, 0, 0, 866, 867, 1, 0, 0, 0, 867, 865, 1, 0, 0, 0, 867, 868, 1, 0, 0, 0, 868, 869, 1, 0, 0, 0, 869, 870, 3, 88, 37, 0, 870, 179, 1, 0, 0, 0, 871, 872, 3, 178, 82, 0, 872, 181, 1, 0, 0, 0, 873, 874, 3, 54, 20, 0, 874, 875, 1, 0, 0, 0, 875, 876, 6, 84, 10, 0, 876, 183, 1, 0, 0, 0, 877, 878, 3, 56, 21, 0, 878, 879, 1, 0, 0, 0, 879, 880, 6, 85, 10, 0, 880, 185, 1, 0, 0, 0, 881, 882, 3, 58, 22, 0, 882, 883, 1, 0, 0, 0, 883, 884, 6, 86, 10, 0, 884, 187, 1, 0, 0, 0, 885, 886, 3, 74, 30, 0, 886, 887, 1, 0, 0, 0, 887, 888, 6, 87, 13, 0, 888, 889, 6, 87, 14, 0, 889, 189, 1, 0, 0, 0, 890, 891, 3, 172, 79, 0, 891, 892, 1, 0, 0, 0, 892, 893, 6, 88, 11, 0, 893, 191, 1, 0, 0, 0, 894, 895, 3, 174, 80, 0, 895, 896, 1, 0, 0, 0, 896, 897, 6, 89, 15, 0, 897, 193, 1, 0, 0, 0, 898, 899, 3, 112, 49, 0, 899, 900, 1, 0, 0, 0, 900, 901, 6, 90, 16, 0, 901, 195, 1, 0, 0, 0, 902, 903, 3, 108, 47, 0, 903, 904, 1, 0, 0, 0, 904, 905, 6, 91, 17, 0, 905, 197, 1, 0, 0, 0, 906, 907, 3, 96, 41, 0, 907, 908, 1, 0, 0, 0, 908, 909, 6, 92, 18, 0, 909, 199, 1, 0, 0, 0, 910, 911, 5, 111, 0, 0, 911, 912, 5, 112, 0, 0, 912, 913, 5, 116, 0, 0, 913, 914, 5, 105, 0, 0, 914, 915, 5, 111, 0, 0, 915, 916, 5, 110, 0, 0, 916, 917, 5, 115, 0, 0, 917, 201, 1, 0, 0, 0, 918, 919, 5, 109, 0, 0, 919, 920, 5, 101, 0, 0, 920, 921, 5, 116, 0, 0, 921, 922, 5, 97, 0, 0, 922, 923, 5, 100, 0, 0, 923, 924, 5, 97, 0, 0, 924, 925, 5, 116, 0, 0, 925, 926, 5, 97, 0, 0, 926, 203, 1, 0, 0, 0, 927, 928, 3, 62, 24, 0, 928, 929, 1, 0, 0, 0, 929, 930, 6, 95, 19, 0, 930, 205, 1, 0, 0, 0, 931, 932, 3, 54, 20, 0, 932, 933, 1, 0, 0, 0, 933, 934, 6, 96, 10, 0, 934, 207, 1, 0, 0, 0, 
935, 936, 3, 56, 21, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 97, 10, 0, 938, 209, 1, 0, 0, 0, 939, 940, 3, 58, 22, 0, 940, 941, 1, 0, 0, 0, 941, 942, 6, 98, 10, 0, 942, 211, 1, 0, 0, 0, 943, 944, 3, 74, 30, 0, 944, 945, 1, 0, 0, 0, 945, 946, 6, 99, 13, 0, 946, 947, 6, 99, 14, 0, 947, 213, 1, 0, 0, 0, 948, 949, 3, 116, 51, 0, 949, 950, 1, 0, 0, 0, 950, 951, 6, 100, 20, 0, 951, 215, 1, 0, 0, 0, 952, 953, 3, 112, 49, 0, 953, 954, 1, 0, 0, 0, 954, 955, 6, 101, 16, 0, 955, 217, 1, 0, 0, 0, 956, 961, 3, 78, 32, 0, 957, 961, 3, 76, 31, 0, 958, 961, 3, 92, 39, 0, 959, 961, 3, 166, 76, 0, 960, 956, 1, 0, 0, 0, 960, 957, 1, 0, 0, 0, 960, 958, 1, 0, 0, 0, 960, 959, 1, 0, 0, 0, 961, 219, 1, 0, 0, 0, 962, 965, 3, 78, 32, 0, 963, 965, 3, 166, 76, 0, 964, 962, 1, 0, 0, 0, 964, 963, 1, 0, 0, 0, 965, 969, 1, 0, 0, 0, 966, 968, 3, 218, 102, 0, 967, 966, 1, 0, 0, 0, 968, 971, 1, 0, 0, 0, 969, 967, 1, 0, 0, 0, 969, 970, 1, 0, 0, 0, 970, 982, 1, 0, 0, 0, 971, 969, 1, 0, 0, 0, 972, 975, 3, 92, 39, 0, 973, 975, 3, 86, 36, 0, 974, 972, 1, 0, 0, 0, 974, 973, 1, 0, 0, 0, 975, 977, 1, 0, 0, 0, 976, 978, 3, 218, 102, 0, 977, 976, 1, 0, 0, 0, 978, 979, 1, 0, 0, 0, 979, 977, 1, 0, 0, 0, 979, 980, 1, 0, 0, 0, 980, 982, 1, 0, 0, 0, 981, 964, 1, 0, 0, 0, 981, 974, 1, 0, 0, 0, 982, 221, 1, 0, 0, 0, 983, 986, 3, 220, 103, 0, 984, 986, 3, 178, 82, 0, 985, 983, 1, 0, 0, 0, 985, 984, 1, 0, 0, 0, 986, 987, 1, 0, 0, 0, 987, 985, 1, 0, 0, 0, 987, 988, 1, 0, 0, 0, 988, 223, 1, 0, 0, 0, 989, 990, 3, 54, 20, 0, 990, 991, 1, 0, 0, 0, 991, 992, 6, 105, 10, 0, 992, 225, 1, 0, 0, 0, 993, 994, 3, 56, 21, 0, 994, 995, 1, 0, 0, 0, 995, 996, 6, 106, 10, 0, 996, 227, 1, 0, 0, 0, 997, 998, 3, 58, 22, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 107, 10, 0, 1000, 229, 1, 0, 0, 0, 1001, 1002, 3, 74, 30, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1004, 6, 108, 13, 0, 1004, 1005, 6, 108, 14, 0, 1005, 231, 1, 0, 0, 0, 1006, 1007, 3, 108, 47, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 109, 17, 0, 1009, 233, 1, 0, 0, 0, 1010, 1011, 3, 112, 49, 
0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 110, 16, 0, 1013, 235, 1, 0, 0, 0, 1014, 1015, 3, 116, 51, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 111, 20, 0, 1017, 237, 1, 0, 0, 0, 1018, 1019, 5, 97, 0, 0, 1019, 1020, 5, 115, 0, 0, 1020, 239, 1, 0, 0, 0, 1021, 1022, 3, 222, 104, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1024, 6, 113, 21, 0, 1024, 241, 1, 0, 0, 0, 1025, 1026, 3, 54, 20, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 114, 10, 0, 1028, 243, 1, 0, 0, 0, 1029, 1030, 3, 56, 21, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 115, 10, 0, 1032, 245, 1, 0, 0, 0, 1033, 1034, 3, 58, 22, 0, 1034, 1035, 1, 0, 0, 0, 1035, 1036, 6, 116, 10, 0, 1036, 247, 1, 0, 0, 0, 1037, 1038, 3, 74, 30, 0, 1038, 1039, 1, 0, 0, 0, 1039, 1040, 6, 117, 13, 0, 1040, 1041, 6, 117, 14, 0, 1041, 249, 1, 0, 0, 0, 1042, 1043, 3, 172, 79, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 118, 11, 0, 1045, 1046, 6, 118, 22, 0, 1046, 251, 1, 0, 0, 0, 1047, 1048, 5, 111, 0, 0, 1048, 1049, 5, 110, 0, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1051, 6, 119, 23, 0, 1051, 253, 1, 0, 0, 0, 1052, 1053, 5, 119, 0, 0, 1053, 1054, 5, 105, 0, 0, 1054, 1055, 5, 116, 0, 0, 1055, 1056, 5, 104, 0, 0, 1056, 1057, 1, 0, 0, 0, 1057, 1058, 6, 120, 23, 0, 1058, 255, 1, 0, 0, 0, 1059, 1060, 8, 12, 0, 0, 1060, 257, 1, 0, 0, 0, 1061, 1063, 3, 256, 121, 0, 1062, 1061, 1, 0, 0, 0, 1063, 1064, 1, 0, 0, 0, 1064, 1062, 1, 0, 0, 0, 1064, 1065, 1, 0, 0, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, 3, 326, 156, 0, 1067, 1069, 1, 0, 0, 0, 1068, 1062, 1, 0, 0, 0, 1068, 1069, 1, 0, 0, 0, 1069, 1071, 1, 0, 0, 0, 1070, 1072, 3, 256, 121, 0, 1071, 1070, 1, 0, 0, 0, 1072, 1073, 1, 0, 0, 0, 1073, 1071, 1, 0, 0, 0, 1073, 1074, 1, 0, 0, 0, 1074, 259, 1, 0, 0, 0, 1075, 1076, 3, 180, 83, 0, 1076, 1077, 1, 0, 0, 0, 1077, 1078, 6, 123, 24, 0, 1078, 261, 1, 0, 0, 0, 1079, 1080, 3, 258, 122, 0, 1080, 1081, 1, 0, 0, 0, 1081, 1082, 6, 124, 25, 0, 1082, 263, 1, 0, 0, 0, 1083, 1084, 3, 54, 20, 0, 1084, 1085, 1, 0, 0, 0, 1085, 1086, 6, 125, 10, 0, 1086, 265, 1, 0, 0, 0, 1087, 1088, 3, 
56, 21, 0, 1088, 1089, 1, 0, 0, 0, 1089, 1090, 6, 126, 10, 0, 1090, 267, 1, 0, 0, 0, 1091, 1092, 3, 58, 22, 0, 1092, 1093, 1, 0, 0, 0, 1093, 1094, 6, 127, 10, 0, 1094, 269, 1, 0, 0, 0, 1095, 1096, 3, 74, 30, 0, 1096, 1097, 1, 0, 0, 0, 1097, 1098, 6, 128, 13, 0, 1098, 1099, 6, 128, 14, 0, 1099, 1100, 6, 128, 14, 0, 1100, 271, 1, 0, 0, 0, 1101, 1102, 3, 108, 47, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 129, 17, 0, 1104, 273, 1, 0, 0, 0, 1105, 1106, 3, 112, 49, 0, 1106, 1107, 1, 0, 0, 0, 1107, 1108, 6, 130, 16, 0, 1108, 275, 1, 0, 0, 0, 1109, 1110, 3, 116, 51, 0, 1110, 1111, 1, 0, 0, 0, 1111, 1112, 6, 131, 20, 0, 1112, 277, 1, 0, 0, 0, 1113, 1114, 3, 254, 120, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 132, 26, 0, 1116, 279, 1, 0, 0, 0, 1117, 1118, 3, 222, 104, 0, 1118, 1119, 1, 0, 0, 0, 1119, 1120, 6, 133, 21, 0, 1120, 281, 1, 0, 0, 0, 1121, 1122, 3, 180, 83, 0, 1122, 1123, 1, 0, 0, 0, 1123, 1124, 6, 134, 24, 0, 1124, 283, 1, 0, 0, 0, 1125, 1126, 3, 54, 20, 0, 1126, 1127, 1, 0, 0, 0, 1127, 1128, 6, 135, 10, 0, 1128, 285, 1, 0, 0, 0, 1129, 1130, 3, 56, 21, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1132, 6, 136, 10, 0, 1132, 287, 1, 0, 0, 0, 1133, 1134, 3, 58, 22, 0, 1134, 1135, 1, 0, 0, 0, 1135, 1136, 6, 137, 10, 0, 1136, 289, 1, 0, 0, 0, 1137, 1138, 3, 74, 30, 0, 1138, 1139, 1, 0, 0, 0, 1139, 1140, 6, 138, 13, 0, 1140, 1141, 6, 138, 14, 0, 1141, 291, 1, 0, 0, 0, 1142, 1143, 3, 116, 51, 0, 1143, 1144, 1, 0, 0, 0, 1144, 1145, 6, 139, 20, 0, 1145, 293, 1, 0, 0, 0, 1146, 1147, 3, 180, 83, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1149, 6, 140, 24, 0, 1149, 295, 1, 0, 0, 0, 1150, 1151, 3, 176, 81, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 141, 27, 0, 1153, 297, 1, 0, 0, 0, 1154, 1155, 3, 54, 20, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 142, 10, 0, 1157, 299, 1, 0, 0, 0, 1158, 1159, 3, 56, 21, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 6, 143, 10, 0, 1161, 301, 1, 0, 0, 0, 1162, 1163, 3, 58, 22, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 6, 144, 10, 0, 1165, 303, 1, 0, 0, 0, 1166, 1167, 3, 74, 30, 
0, 1167, 1168, 1, 0, 0, 0, 1168, 1169, 6, 145, 13, 0, 1169, 1170, 6, 145, 14, 0, 1170, 305, 1, 0, 0, 0, 1171, 1172, 5, 105, 0, 0, 1172, 1173, 5, 110, 0, 0, 1173, 1174, 5, 102, 0, 0, 1174, 1175, 5, 111, 0, 0, 1175, 307, 1, 0, 0, 0, 1176, 1177, 3, 54, 20, 0, 1177, 1178, 1, 0, 0, 0, 1178, 1179, 6, 147, 10, 0, 1179, 309, 1, 0, 0, 0, 1180, 1181, 3, 56, 21, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 148, 10, 0, 1183, 311, 1, 0, 0, 0, 1184, 1185, 3, 58, 22, 0, 1185, 1186, 1, 0, 0, 0, 1186, 1187, 6, 149, 10, 0, 1187, 313, 1, 0, 0, 0, 1188, 1189, 3, 74, 30, 0, 1189, 1190, 1, 0, 0, 0, 1190, 1191, 6, 150, 13, 0, 1191, 1192, 6, 150, 14, 0, 1192, 315, 1, 0, 0, 0, 1193, 1194, 5, 102, 0, 0, 1194, 1195, 5, 117, 0, 0, 1195, 1196, 5, 110, 0, 0, 1196, 1197, 5, 99, 0, 0, 1197, 1198, 5, 116, 0, 0, 1198, 1199, 5, 105, 0, 0, 1199, 1200, 5, 111, 0, 0, 1200, 1201, 5, 110, 0, 0, 1201, 1202, 5, 115, 0, 0, 1202, 317, 1, 0, 0, 0, 1203, 1204, 3, 54, 20, 0, 1204, 1205, 1, 0, 0, 0, 1205, 1206, 6, 152, 10, 0, 1206, 319, 1, 0, 0, 0, 1207, 1208, 3, 56, 21, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 6, 153, 10, 0, 1210, 321, 1, 0, 0, 0, 1211, 1212, 3, 58, 22, 0, 1212, 1213, 1, 0, 0, 0, 1213, 1214, 6, 154, 10, 0, 1214, 323, 1, 0, 0, 0, 1215, 1216, 3, 174, 80, 0, 1216, 1217, 1, 0, 0, 0, 1217, 1218, 6, 155, 15, 0, 1218, 1219, 6, 155, 14, 0, 1219, 325, 1, 0, 0, 0, 1220, 1221, 5, 58, 0, 0, 1221, 327, 1, 0, 0, 0, 1222, 1228, 3, 86, 36, 0, 1223, 1228, 3, 76, 31, 0, 1224, 1228, 3, 116, 51, 0, 1225, 1228, 3, 78, 32, 0, 1226, 1228, 3, 92, 39, 0, 1227, 1222, 1, 0, 0, 0, 1227, 1223, 1, 0, 0, 0, 1227, 1224, 1, 0, 0, 0, 1227, 1225, 1, 0, 0, 0, 1227, 1226, 1, 0, 0, 0, 1228, 1229, 1, 0, 0, 0, 1229, 1227, 1, 0, 0, 0, 1229, 1230, 1, 0, 0, 0, 1230, 329, 1, 0, 0, 0, 1231, 1232, 3, 54, 20, 0, 1232, 1233, 1, 0, 0, 0, 1233, 1234, 6, 158, 10, 0, 1234, 331, 1, 0, 0, 0, 1235, 1236, 3, 56, 21, 0, 1236, 1237, 1, 0, 0, 0, 1237, 1238, 6, 159, 10, 0, 1238, 333, 1, 0, 0, 0, 1239, 1240, 3, 58, 22, 0, 1240, 1241, 1, 0, 0, 0, 1241, 1242, 
6, 160, 10, 0, 1242, 335, 1, 0, 0, 0, 1243, 1244, 3, 74, 30, 0, 1244, 1245, 1, 0, 0, 0, 1245, 1246, 6, 161, 13, 0, 1246, 1247, 6, 161, 14, 0, 1247, 337, 1, 0, 0, 0, 1248, 1249, 3, 62, 24, 0, 1249, 1250, 1, 0, 0, 0, 1250, 1251, 6, 162, 19, 0, 1251, 1252, 6, 162, 14, 0, 1252, 1253, 6, 162, 28, 0, 1253, 339, 1, 0, 0, 0, 1254, 1255, 3, 54, 20, 0, 1255, 1256, 1, 0, 0, 0, 1256, 1257, 6, 163, 10, 0, 1257, 341, 1, 0, 0, 0, 1258, 1259, 3, 56, 21, 0, 1259, 1260, 1, 0, 0, 0, 1260, 1261, 6, 164, 10, 0, 1261, 343, 1, 0, 0, 0, 1262, 1263, 3, 58, 22, 0, 1263, 1264, 1, 0, 0, 0, 1264, 1265, 6, 165, 10, 0, 1265, 345, 1, 0, 0, 0, 1266, 1267, 3, 112, 49, 0, 1267, 1268, 1, 0, 0, 0, 1268, 1269, 6, 166, 16, 0, 1269, 1270, 6, 166, 14, 0, 1270, 1271, 6, 166, 6, 0, 1271, 347, 1, 0, 0, 0, 1272, 1273, 3, 54, 20, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 167, 10, 0, 1275, 349, 1, 0, 0, 0, 1276, 1277, 3, 56, 21, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 168, 10, 0, 1279, 351, 1, 0, 0, 0, 1280, 1281, 3, 58, 22, 0, 1281, 1282, 1, 0, 0, 0, 1282, 1283, 6, 169, 10, 0, 1283, 353, 1, 0, 0, 0, 1284, 1285, 3, 180, 83, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 170, 14, 0, 1287, 1288, 6, 170, 0, 0, 1288, 1289, 6, 170, 24, 0, 1289, 355, 1, 0, 0, 0, 1290, 1291, 3, 176, 81, 0, 1291, 1292, 1, 0, 0, 0, 1292, 1293, 6, 171, 14, 0, 1293, 1294, 6, 171, 0, 0, 1294, 1295, 6, 171, 27, 0, 1295, 357, 1, 0, 0, 0, 1296, 1297, 3, 102, 44, 0, 1297, 1298, 1, 0, 0, 0, 1298, 1299, 6, 172, 14, 0, 1299, 1300, 6, 172, 0, 0, 1300, 1301, 6, 172, 29, 0, 1301, 359, 1, 0, 0, 0, 1302, 1303, 3, 74, 30, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 173, 13, 0, 1305, 1306, 6, 173, 14, 0, 1306, 361, 1, 0, 0, 0, 60, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 525, 535, 539, 542, 551, 553, 564, 571, 576, 615, 620, 629, 636, 641, 643, 654, 662, 665, 667, 672, 677, 683, 690, 695, 701, 704, 712, 716, 849, 854, 859, 861, 867, 960, 964, 969, 974, 979, 981, 985, 987, 1064, 1068, 1073, 1227, 1229, 30, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 
10, 0, 5, 12, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 67, 0, 5, 0, 0, 7, 28, 0, 4, 0, 0, 7, 68, 0, 7, 37, 0, 7, 35, 0, 7, 29, 0, 7, 24, 0, 7, 39, 0, 7, 79, 0, 5, 11, 0, 5, 7, 0, 7, 70, 0, 7, 89, 0, 7, 88, 0, 7, 69, 0, 5, 13, 0, 7, 32, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 75fa8061fa48..d7a73eeb844d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -18,30 +18,32 @@ public class EsqlBaseLexer extends Lexer { new PredictionContextCache(); public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, INLINESTATS=8, - KEEP=9, LIMIT=10, META=11, MV_EXPAND=12, RENAME=13, ROW=14, SHOW=15, SORT=16, - STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21, - WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25, - PIPE=26, QUOTED_STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, - AND=31, ASC=32, ASSIGN=33, CAST_OP=34, COMMA=35, DESC=36, DOT=37, FALSE=38, - FIRST=39, LAST=40, LP=41, IN=42, IS=43, LIKE=44, NOT=45, NULL=46, NULLS=47, - OR=48, PARAM=49, RLIKE=50, RP=51, TRUE=52, EQ=53, CIEQ=54, NEQ=55, LT=56, - LTE=57, GT=58, GTE=59, PLUS=60, MINUS=61, ASTERISK=62, SLASH=63, PERCENT=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, OPTIONS=72, - METADATA=73, FROM_UNQUOTED_IDENTIFIER=74, FROM_LINE_COMMENT=75, FROM_MULTILINE_COMMENT=76, - FROM_WS=77, ID_PATTERN=78, PROJECT_LINE_COMMENT=79, PROJECT_MULTILINE_COMMENT=80, - PROJECT_WS=81, AS=82, RENAME_LINE_COMMENT=83, RENAME_MULTILINE_COMMENT=84, - RENAME_WS=85, ON=86, WITH=87, ENRICH_POLICY_NAME=88, 
ENRICH_LINE_COMMENT=89, - ENRICH_MULTILINE_COMMENT=90, ENRICH_WS=91, ENRICH_FIELD_LINE_COMMENT=92, - ENRICH_FIELD_MULTILINE_COMMENT=93, ENRICH_FIELD_WS=94, MVEXPAND_LINE_COMMENT=95, - MVEXPAND_MULTILINE_COMMENT=96, MVEXPAND_WS=97, INFO=98, SHOW_LINE_COMMENT=99, - SHOW_MULTILINE_COMMENT=100, SHOW_WS=101, FUNCTIONS=102, META_LINE_COMMENT=103, - META_MULTILINE_COMMENT=104, META_WS=105, COLON=106, SETTING=107, SETTING_LINE_COMMENT=108, - SETTTING_MULTILINE_COMMENT=109, SETTING_WS=110; + KEEP=9, LIMIT=10, META=11, METRICS=12, MV_EXPAND=13, RENAME=14, ROW=15, + SHOW=16, SORT=17, STATS=18, WHERE=19, UNKNOWN_CMD=20, LINE_COMMENT=21, + MULTILINE_COMMENT=22, WS=23, INDEX_UNQUOTED_IDENTIFIER=24, EXPLAIN_WS=25, + EXPLAIN_LINE_COMMENT=26, EXPLAIN_MULTILINE_COMMENT=27, PIPE=28, QUOTED_STRING=29, + INTEGER_LITERAL=30, DECIMAL_LITERAL=31, BY=32, AND=33, ASC=34, ASSIGN=35, + CAST_OP=36, COMMA=37, DESC=38, DOT=39, FALSE=40, FIRST=41, LAST=42, LP=43, + IN=44, IS=45, LIKE=46, NOT=47, NULL=48, NULLS=49, OR=50, PARAM=51, RLIKE=52, + RP=53, TRUE=54, EQ=55, CIEQ=56, NEQ=57, LT=58, LTE=59, GT=60, GTE=61, + PLUS=62, MINUS=63, ASTERISK=64, SLASH=65, PERCENT=66, OPENING_BRACKET=67, + CLOSING_BRACKET=68, UNQUOTED_IDENTIFIER=69, QUOTED_IDENTIFIER=70, EXPR_LINE_COMMENT=71, + EXPR_MULTILINE_COMMENT=72, EXPR_WS=73, OPTIONS=74, METADATA=75, FROM_LINE_COMMENT=76, + FROM_MULTILINE_COMMENT=77, FROM_WS=78, ID_PATTERN=79, PROJECT_LINE_COMMENT=80, + PROJECT_MULTILINE_COMMENT=81, PROJECT_WS=82, AS=83, RENAME_LINE_COMMENT=84, + RENAME_MULTILINE_COMMENT=85, RENAME_WS=86, ON=87, WITH=88, ENRICH_POLICY_NAME=89, + ENRICH_LINE_COMMENT=90, ENRICH_MULTILINE_COMMENT=91, ENRICH_WS=92, ENRICH_FIELD_LINE_COMMENT=93, + ENRICH_FIELD_MULTILINE_COMMENT=94, ENRICH_FIELD_WS=95, MVEXPAND_LINE_COMMENT=96, + MVEXPAND_MULTILINE_COMMENT=97, MVEXPAND_WS=98, INFO=99, SHOW_LINE_COMMENT=100, + SHOW_MULTILINE_COMMENT=101, SHOW_WS=102, FUNCTIONS=103, META_LINE_COMMENT=104, + META_MULTILINE_COMMENT=105, META_WS=106, COLON=107, 
SETTING=108, SETTING_LINE_COMMENT=109, + SETTTING_MULTILINE_COMMENT=110, SETTING_WS=111, METRICS_LINE_COMMENT=112, + METRICS_MULTILINE_COMMENT=113, METRICS_WS=114, CLOSING_METRICS_LINE_COMMENT=115, + CLOSING_METRICS_MULTILINE_COMMENT=116, CLOSING_METRICS_WS=117; public static final int EXPLAIN_MODE=1, EXPRESSION_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, META_MODE=10, - SETTING_MODE=11; + SETTING_MODE=11, METRICS_MODE=12, CLOSING_METRICS_MODE=13; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; @@ -49,15 +51,16 @@ public class EsqlBaseLexer extends Lexer { public static String[] modeNames = { "DEFAULT_MODE", "EXPLAIN_MODE", "EXPRESSION_MODE", "FROM_MODE", "PROJECT_MODE", "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE", - "META_MODE", "SETTING_MODE" + "META_MODE", "SETTING_MODE", "METRICS_MODE", "CLOSING_METRICS_MODE" }; private static String[] makeRuleNames() { return new String[] { "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "INLINESTATS", - "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", - "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", - "WS", "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", + "KEEP", "LIMIT", "META", "METRICS", "MV_EXPAND", "RENAME", "ROW", "SHOW", + "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", + "WS", "INDEX_UNQUOTED_IDENTIFIER_PART", "INDEX_UNQUOTED_IDENTIFIER", + "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", @@ -69,23 +72,27 @@ private static String[] makeRuleNames() { "QUOTED_ID", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", 
"EXPR_MULTILINE_COMMENT", "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COMMA", "FROM_ASSIGN", "FROM_QUOTED_STRING", "OPTIONS", "METADATA", - "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", - "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", - "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "ID_PATTERN", - "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "RENAME_PIPE", - "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "AS", "RENAME_ID_PATTERN", - "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", - "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", - "ENRICH_QUOTED_IDENTIFIER", "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", - "ENRICH_FIELD_COMMA", "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_ID_PATTERN", - "ENRICH_FIELD_QUOTED_IDENTIFIER", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", - "ENRICH_FIELD_WS", "MVEXPAND_PIPE", "MVEXPAND_DOT", "MVEXPAND_QUOTED_IDENTIFIER", - "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", - "MVEXPAND_WS", "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", - "SHOW_WS", "META_PIPE", "FUNCTIONS", "META_LINE_COMMENT", "META_MULTILINE_COMMENT", + "FROM_INDEX_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN", + "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", + "AS", "RENAME_ID_PATTERN", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ENRICH_PIPE", "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY", + "ENRICH_POLICY_NAME", 
"ENRICH_QUOTED_IDENTIFIER", "ENRICH_MODE_UNQUOTED_VALUE", + "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_PIPE", + "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", + "ENRICH_FIELD_ID_PATTERN", "ENRICH_FIELD_QUOTED_IDENTIFIER", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_PIPE", + "MVEXPAND_DOT", "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER", + "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", + "SHOW_PIPE", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", "SHOW_WS", + "META_PIPE", "FUNCTIONS", "META_LINE_COMMENT", "META_MULTILINE_COMMENT", "META_WS", "SETTING_CLOSING_BRACKET", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS" + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "METRICS_PIPE", "METRICS_INDEX_UNQUOTED_IDENTIFIER", + "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_COMMA", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS", "CLOSING_METRICS_QUOTED_IDENTIFIER", "CLOSING_METRICS_UNQUOTED_IDENTIFIER", + "CLOSING_METRICS_BY", "CLOSING_METRICS_PIPE" }; } public static final String[] ruleNames = makeRuleNames(); @@ -93,15 +100,15 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'mv_expand'", - "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, - null, null, null, null, null, null, "'|'", null, null, null, "'by'", - "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'", - "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", - "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", - "'<='", "'>'", "'>='", "'+'", "'-'", 
"'*'", "'/'", "'%'", null, "']'", - null, null, null, null, null, "'options'", "'metadata'", null, null, - null, null, null, null, null, null, "'as'", null, null, null, "'on'", + "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'metrics'", + "'mv_expand'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", + null, null, null, null, null, null, null, null, "'|'", null, null, null, + "'by'", "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", + "'first'", "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", + "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", + "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", + null, "']'", null, null, null, null, null, "'options'", "'metadata'", + null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, null, null, null, null, null, "'info'", null, null, null, "'functions'", null, null, null, "':'" }; @@ -110,25 +117,28 @@ private static String[] makeLiteralNames() { private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "INLINESTATS", "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", - "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", - "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", - "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", - "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", - "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", - "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "OPTIONS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", 
- "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", - "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", - "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", - "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "INLINESTATS", "KEEP", "LIMIT", "META", "METRICS", "MV_EXPAND", "RENAME", + "ROW", "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", + "MULTILINE_COMMENT", "WS", "INDEX_UNQUOTED_IDENTIFIER", "EXPLAIN_WS", + "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", + "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", + "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", + "OPTIONS", "METADATA", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", "SHOW_WS", "FUNCTIONS", "META_LINE_COMMENT", "META_MULTILINE_COMMENT", "META_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS" + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", + "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + 
"CLOSING_METRICS_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -191,759 +201,832 @@ public EsqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\u0004\u0000n\u04ad\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0004\u0000u\u051b\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ - "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ - "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ - "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ - "\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f"+ - "\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012"+ - "\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015"+ - "\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018"+ - "\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b"+ - "\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e"+ - "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ - "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ - "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ - "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ - "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ - "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002"+ - "<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002"+ - "A\u0007A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002"+ - 
"F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002"+ - "K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002"+ - "P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002"+ - "U\u0007U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002"+ - "Z\u0007Z\u0002[\u0007[\u0002\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002"+ - "_\u0007_\u0002`\u0007`\u0002a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002"+ - "d\u0007d\u0002e\u0007e\u0002f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002"+ - "i\u0007i\u0002j\u0007j\u0002k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002"+ - "n\u0007n\u0002o\u0007o\u0002p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002"+ - "s\u0007s\u0002t\u0007t\u0002u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002"+ - "x\u0007x\u0002y\u0007y\u0002z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002"+ - "}\u0007}\u0002~\u0007~\u0002\u007f\u0007\u007f\u0002\u0080\u0007\u0080"+ - "\u0002\u0081\u0007\u0081\u0002\u0082\u0007\u0082\u0002\u0083\u0007\u0083"+ - "\u0002\u0084\u0007\u0084\u0002\u0085\u0007\u0085\u0002\u0086\u0007\u0086"+ - "\u0002\u0087\u0007\u0087\u0002\u0088\u0007\u0088\u0002\u0089\u0007\u0089"+ - "\u0002\u008a\u0007\u008a\u0002\u008b\u0007\u008b\u0002\u008c\u0007\u008c"+ - "\u0002\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f"+ - "\u0002\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092"+ - "\u0002\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095"+ - "\u0002\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098"+ - "\u0002\u0099\u0007\u0099\u0002\u009a\u0007\u009a\u0002\u009b\u0007\u009b"+ - "\u0002\u009c\u0007\u009c\u0002\u009d\u0007\u009d\u0002\u009e\u0007\u009e"+ - "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002"+ - 
"\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001"+ - "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ + "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0002\u0000\u0007"+ + "\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007"+ + "\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007"+ + "\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n"+ + "\u0007\n\u0002\u000b\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002"+ + "\u000e\u0007\u000e\u0002\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002"+ + "\u0011\u0007\u0011\u0002\u0012\u0007\u0012\u0002\u0013\u0007\u0013\u0002"+ + "\u0014\u0007\u0014\u0002\u0015\u0007\u0015\u0002\u0016\u0007\u0016\u0002"+ + "\u0017\u0007\u0017\u0002\u0018\u0007\u0018\u0002\u0019\u0007\u0019\u0002"+ + "\u001a\u0007\u001a\u0002\u001b\u0007\u001b\u0002\u001c\u0007\u001c\u0002"+ + "\u001d\u0007\u001d\u0002\u001e\u0007\u001e\u0002\u001f\u0007\u001f\u0002"+ + " \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002#\u0007#\u0002$\u0007$\u0002"+ + "%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002(\u0007(\u0002)\u0007)\u0002"+ + 
"*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002-\u0007-\u0002.\u0007.\u0002"+ + "/\u0007/\u00020\u00070\u00021\u00071\u00022\u00072\u00023\u00073\u0002"+ + "4\u00074\u00025\u00075\u00026\u00076\u00027\u00077\u00028\u00078\u0002"+ + "9\u00079\u0002:\u0007:\u0002;\u0007;\u0002<\u0007<\u0002=\u0007=\u0002"+ + ">\u0007>\u0002?\u0007?\u0002@\u0007@\u0002A\u0007A\u0002B\u0007B\u0002"+ + "C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002F\u0007F\u0002G\u0007G\u0002"+ + "H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002K\u0007K\u0002L\u0007L\u0002"+ + "M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002P\u0007P\u0002Q\u0007Q\u0002"+ + "R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002U\u0007U\u0002V\u0007V\u0002"+ + "W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002Z\u0007Z\u0002[\u0007[\u0002"+ + "\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002_\u0007_\u0002`\u0007`\u0002"+ + "a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002d\u0007d\u0002e\u0007e\u0002"+ + "f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002i\u0007i\u0002j\u0007j\u0002"+ + "k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002n\u0007n\u0002o\u0007o\u0002"+ + "p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002s\u0007s\u0002t\u0007t\u0002"+ + "u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002x\u0007x\u0002y\u0007y\u0002"+ + "z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002}\u0007}\u0002~\u0007~\u0002"+ + "\u007f\u0007\u007f\u0002\u0080\u0007\u0080\u0002\u0081\u0007\u0081\u0002"+ + "\u0082\u0007\u0082\u0002\u0083\u0007\u0083\u0002\u0084\u0007\u0084\u0002"+ + "\u0085\u0007\u0085\u0002\u0086\u0007\u0086\u0002\u0087\u0007\u0087\u0002"+ + "\u0088\u0007\u0088\u0002\u0089\u0007\u0089\u0002\u008a\u0007\u008a\u0002"+ + "\u008b\u0007\u008b\u0002\u008c\u0007\u008c\u0002\u008d\u0007\u008d\u0002"+ + "\u008e\u0007\u008e\u0002\u008f\u0007\u008f\u0002\u0090\u0007\u0090\u0002"+ + "\u0091\u0007\u0091\u0002\u0092\u0007\u0092\u0002\u0093\u0007\u0093\u0002"+ + "\u0094\u0007\u0094\u0002\u0095\u0007\u0095\u0002\u0096\u0007\u0096\u0002"+ + "\u0097\u0007\u0097\u0002\u0098\u0007\u0098\u0002\u0099\u0007\u0099\u0002"+ + 
"\u009a\u0007\u009a\u0002\u009b\u0007\u009b\u0002\u009c\u0007\u009c\u0002"+ + "\u009d\u0007\u009d\u0002\u009e\u0007\u009e\u0002\u009f\u0007\u009f\u0002"+ + "\u00a0\u0007\u00a0\u0002\u00a1\u0007\u00a1\u0002\u00a2\u0007\u00a2\u0002"+ + "\u00a3\u0007\u00a3\u0002\u00a4\u0007\u00a4\u0002\u00a5\u0007\u00a5\u0002"+ + "\u00a6\u0007\u00a6\u0002\u00a7\u0007\u00a7\u0002\u00a8\u0007\u00a8\u0002"+ + "\u00a9\u0007\u00a9\u0002\u00aa\u0007\u00aa\u0002\u00ab\u0007\u00ab\u0002"+ + "\u00ac\u0007\u00ac\u0002\u00ad\u0007\u00ad\u0001\u0000\u0001\u0000\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b"+ + "\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001"+ + 
"\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0012\u0004\u0012\u01e2\b\u0012\u000b\u0012\f\u0012\u01e3\u0001\u0012"+ - "\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013"+ - "\u01ec\b\u0013\n\u0013\f\u0013\u01ef\t\u0013\u0001\u0013\u0003\u0013\u01f2"+ - "\b\u0013\u0001\u0013\u0003\u0013\u01f5\b\u0013\u0001\u0013\u0001\u0013"+ - "\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014"+ - "\u01fe\b\u0014\n\u0014\f\u0014\u0201\t\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0004\u0015\u0209\b\u0015\u000b"+ - "\u0015\f\u0015\u020a\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001"+ - "\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001"+ - "\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001"+ - "\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0003 \u0234\b \u0001 \u0004"+ - " \u0237\b \u000b \f \u0238\u0001!\u0001!\u0001\"\u0001\"\u0001#\u0001"+ - "#\u0001#\u0003#\u0242\b#\u0001$\u0001$\u0001%\u0001%\u0001%\u0003%\u0249"+ - "\b%\u0001&\u0001&\u0001&\u0005&\u024e\b&\n&\f&\u0251\t&\u0001&\u0001&"+ - "\u0001&\u0001&\u0001&\u0001&\u0005&\u0259\b&\n&\f&\u025c\t&\u0001&\u0001"+ - 
"&\u0001&\u0001&\u0001&\u0003&\u0263\b&\u0001&\u0003&\u0266\b&\u0003&\u0268"+ - "\b&\u0001\'\u0004\'\u026b\b\'\u000b\'\f\'\u026c\u0001(\u0004(\u0270\b"+ - "(\u000b(\f(\u0271\u0001(\u0001(\u0005(\u0276\b(\n(\f(\u0279\t(\u0001("+ - "\u0001(\u0004(\u027d\b(\u000b(\f(\u027e\u0001(\u0004(\u0282\b(\u000b("+ - "\f(\u0283\u0001(\u0001(\u0005(\u0288\b(\n(\f(\u028b\t(\u0003(\u028d\b"+ - "(\u0001(\u0001(\u0001(\u0001(\u0004(\u0293\b(\u000b(\f(\u0294\u0001(\u0001"+ - "(\u0003(\u0299\b(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0001"+ - "+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001-\u0001-\u0001-\u0001.\u0001"+ - ".\u0001/\u0001/\u0001/\u0001/\u0001/\u00010\u00010\u00011\u00011\u0001"+ - "1\u00011\u00011\u00011\u00012\u00012\u00012\u00012\u00012\u00012\u0001"+ - "3\u00013\u00013\u00013\u00013\u00014\u00014\u00015\u00015\u00015\u0001"+ - "6\u00016\u00016\u00017\u00017\u00017\u00017\u00017\u00018\u00018\u0001"+ - "8\u00018\u00019\u00019\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001"+ - ":\u0001:\u0001:\u0001;\u0001;\u0001;\u0001<\u0001<\u0001=\u0001=\u0001"+ - "=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001"+ - "?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001A\u0001B\u0001B\u0001B\u0001"+ - "C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001F\u0001"+ - "G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001"+ - "L\u0001L\u0001L\u0001L\u0001L\u0001M\u0001M\u0001M\u0001M\u0001M\u0001"+ - "N\u0001N\u0005N\u031c\bN\nN\fN\u031f\tN\u0001N\u0001N\u0003N\u0323\bN"+ - "\u0001N\u0004N\u0326\bN\u000bN\fN\u0327\u0003N\u032a\bN\u0001O\u0001O"+ - "\u0004O\u032e\bO\u000bO\fO\u032f\u0001O\u0001O\u0001P\u0001P\u0001Q\u0001"+ - "Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001"+ - "S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001"+ - "V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001"+ - "X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ - 
"Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001[\u0001[\u0001"+ - "[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0003\\\u036f\b\\\u0001]\u0004"+ - "]\u0372\b]\u000b]\f]\u0373\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001"+ - "_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001"+ - "a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001"+ - "d\u0001d\u0001d\u0003d\u0393\bd\u0001e\u0001e\u0003e\u0397\be\u0001e\u0005"+ - "e\u039a\be\ne\fe\u039d\te\u0001e\u0001e\u0003e\u03a1\be\u0001e\u0004e"+ - "\u03a4\be\u000be\fe\u03a5\u0003e\u03a8\be\u0001f\u0001f\u0004f\u03ac\b"+ - "f\u000bf\ff\u03ad\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001"+ - "h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001j\u0001"+ - "k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001"+ - "m\u0001m\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001"+ - "p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001"+ - "r\u0001s\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001"+ - "t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001"+ - "v\u0001v\u0001v\u0001w\u0001w\u0001x\u0004x\u03f9\bx\u000bx\fx\u03fa\u0001"+ - "x\u0001x\u0003x\u03ff\bx\u0001x\u0004x\u0402\bx\u000bx\fx\u0403\u0001"+ - "y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001"+ - "{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001"+ - "~\u0001~\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f"+ - "\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081"+ - "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082"+ - "\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084"+ - "\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085"+ - "\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087"+ - "\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088"+ 
- "\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089"+ - "\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b"+ - "\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c"+ - "\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e"+ - "\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f"+ - "\u0001\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090"+ - "\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092"+ - "\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093"+ - "\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0094\u0001\u0095"+ - "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ - "\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096\u0001\u0096"+ - "\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098"+ - "\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099"+ - "\u0001\u0099\u0001\u0099\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b"+ - "\u0001\u009b\u0001\u009b\u0001\u009b\u0004\u009b\u049e\b\u009b\u000b\u009b"+ - "\f\u009b\u049f\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009c\u0001\u009d"+ - "\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001\u009e\u0001\u009e"+ - "\u0001\u009e\u0002\u01ff\u025a\u0000\u009f\f\u0001\u000e\u0002\u0010\u0003"+ - "\u0012\u0004\u0014\u0005\u0016\u0006\u0018\u0007\u001a\b\u001c\t\u001e"+ - "\n \u000b\"\f$\r&\u000e(\u000f*\u0010,\u0011.\u00120\u00132\u00144\u0015"+ - "6\u00168\u0000:\u0000<\u0017>\u0018@\u0019B\u001aD\u0000F\u0000H\u0000"+ - "J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u001bZ\u001c\\\u001d"+ - "^\u001e`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u00841\u0086"+ - "2\u00883\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098;\u009a"+ - "<\u009c=\u009e>\u00a0?\u00a2@\u00a4A\u00a6B\u00a8C\u00aa\u0000\u00acD"+ - 
"\u00aeE\u00b0F\u00b2G\u00b4\u0000\u00b6\u0000\u00b8\u0000\u00ba\u0000"+ - "\u00bc\u0000\u00be\u0000\u00c0H\u00c2I\u00c4\u0000\u00c6J\u00c8K\u00ca"+ - "L\u00ccM\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4\u0000\u00d6\u0000\u00d8"+ - "N\u00daO\u00dcP\u00deQ\u00e0\u0000\u00e2\u0000\u00e4\u0000\u00e6\u0000"+ - "\u00e8R\u00ea\u0000\u00ecS\u00eeT\u00f0U\u00f2\u0000\u00f4\u0000\u00f6"+ - "V\u00f8W\u00fa\u0000\u00fcX\u00fe\u0000\u0100\u0000\u0102Y\u0104Z\u0106"+ - "[\u0108\u0000\u010a\u0000\u010c\u0000\u010e\u0000\u0110\u0000\u0112\u0000"+ - "\u0114\u0000\u0116\\\u0118]\u011a^\u011c\u0000\u011e\u0000\u0120\u0000"+ - "\u0122\u0000\u0124_\u0126`\u0128a\u012a\u0000\u012cb\u012ec\u0130d\u0132"+ - "e\u0134\u0000\u0136f\u0138g\u013ah\u013ci\u013e\u0000\u0140j\u0142k\u0144"+ - "l\u0146m\u0148n\f\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t"+ - "\n\u000b\r\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000"+ - "\t\n\r\r \u0001\u000009\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004"+ - "\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\n"+ - "\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u000b\u0000\t\n\r\r \""+ - "#,,//::<<>?\\\\||\u04c8\u0000\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001"+ - "\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001"+ - "\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001"+ - "\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001"+ - "\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001"+ - "\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000"+ - "\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000"+ - "\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,"+ - "\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000"+ - "\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000"+ - 
"\u00006\u0001\u0000\u0000\u0000\u00018\u0001\u0000\u0000\u0000\u0001:"+ - "\u0001\u0000\u0000\u0000\u0001<\u0001\u0000\u0000\u0000\u0001>\u0001\u0000"+ - "\u0000\u0000\u0001@\u0001\u0000\u0000\u0000\u0002B\u0001\u0000\u0000\u0000"+ - "\u0002X\u0001\u0000\u0000\u0000\u0002Z\u0001\u0000\u0000\u0000\u0002\\"+ - "\u0001\u0000\u0000\u0000\u0002^\u0001\u0000\u0000\u0000\u0002`\u0001\u0000"+ - "\u0000\u0000\u0002b\u0001\u0000\u0000\u0000\u0002d\u0001\u0000\u0000\u0000"+ - "\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000\u0000\u0000\u0002j"+ - "\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002n\u0001\u0000"+ - "\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001\u0000\u0000\u0000"+ - "\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000\u0000\u0000\u0002x"+ - "\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000\u0002|\u0001\u0000"+ - "\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080\u0001\u0000\u0000"+ - "\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084\u0001\u0000\u0000"+ - "\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088\u0001\u0000\u0000"+ - "\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c\u0001\u0000\u0000"+ - "\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090\u0001\u0000\u0000"+ - "\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094\u0001\u0000\u0000"+ - "\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098\u0001\u0000\u0000"+ - "\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c\u0001\u0000\u0000"+ - "\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0\u0001\u0000\u0000"+ - "\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4\u0001\u0000\u0000"+ - "\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00a8\u0001\u0000\u0000"+ - "\u0000\u0002\u00ac\u0001\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000"+ - "\u0000\u0002\u00b0\u0001\u0000\u0000\u0000\u0002\u00b2\u0001\u0000\u0000"+ - "\u0000\u0003\u00b4\u0001\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000"+ - 
"\u0000\u0003\u00b8\u0001\u0000\u0000\u0000\u0003\u00ba\u0001\u0000\u0000"+ - "\u0000\u0003\u00bc\u0001\u0000\u0000\u0000\u0003\u00be\u0001\u0000\u0000"+ - "\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001\u0000\u0000"+ - "\u0000\u0003\u00c6\u0001\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000"+ - "\u0000\u0003\u00ca\u0001\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000"+ - "\u0000\u0004\u00ce\u0001\u0000\u0000\u0000\u0004\u00d0\u0001\u0000\u0000"+ - "\u0000\u0004\u00d2\u0001\u0000\u0000\u0000\u0004\u00d8\u0001\u0000\u0000"+ - "\u0000\u0004\u00da\u0001\u0000\u0000\u0000\u0004\u00dc\u0001\u0000\u0000"+ - "\u0000\u0004\u00de\u0001\u0000\u0000\u0000\u0005\u00e0\u0001\u0000\u0000"+ - "\u0000\u0005\u00e2\u0001\u0000\u0000\u0000\u0005\u00e4\u0001\u0000\u0000"+ - "\u0000\u0005\u00e6\u0001\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000"+ - "\u0000\u0005\u00ea\u0001\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000"+ - "\u0000\u0005\u00ee\u0001\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000"+ - "\u0000\u0006\u00f2\u0001\u0000\u0000\u0000\u0006\u00f4\u0001\u0000\u0000"+ - "\u0000\u0006\u00f6\u0001\u0000\u0000\u0000\u0006\u00f8\u0001\u0000\u0000"+ - "\u0000\u0006\u00fc\u0001\u0000\u0000\u0000\u0006\u00fe\u0001\u0000\u0000"+ - "\u0000\u0006\u0100\u0001\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000"+ - "\u0000\u0006\u0104\u0001\u0000\u0000\u0000\u0006\u0106\u0001\u0000\u0000"+ - "\u0000\u0007\u0108\u0001\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000"+ - "\u0000\u0007\u010c\u0001\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000"+ - "\u0000\u0007\u0110\u0001\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000"+ - "\u0000\u0007\u0114\u0001\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000"+ - "\u0000\u0007\u0118\u0001\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000"+ - "\u0000\b\u011c\u0001\u0000\u0000\u0000\b\u011e\u0001\u0000\u0000\u0000"+ - "\b\u0120\u0001\u0000\u0000\u0000\b\u0122\u0001\u0000\u0000\u0000\b\u0124"+ - 
"\u0001\u0000\u0000\u0000\b\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001"+ - "\u0000\u0000\u0000\t\u012a\u0001\u0000\u0000\u0000\t\u012c\u0001\u0000"+ - "\u0000\u0000\t\u012e\u0001\u0000\u0000\u0000\t\u0130\u0001\u0000\u0000"+ - "\u0000\t\u0132\u0001\u0000\u0000\u0000\n\u0134\u0001\u0000\u0000\u0000"+ - "\n\u0136\u0001\u0000\u0000\u0000\n\u0138\u0001\u0000\u0000\u0000\n\u013a"+ - "\u0001\u0000\u0000\u0000\n\u013c\u0001\u0000\u0000\u0000\u000b\u013e\u0001"+ - "\u0000\u0000\u0000\u000b\u0140\u0001\u0000\u0000\u0000\u000b\u0142\u0001"+ - "\u0000\u0000\u0000\u000b\u0144\u0001\u0000\u0000\u0000\u000b\u0146\u0001"+ - "\u0000\u0000\u0000\u000b\u0148\u0001\u0000\u0000\u0000\f\u014a\u0001\u0000"+ - "\u0000\u0000\u000e\u0154\u0001\u0000\u0000\u0000\u0010\u015b\u0001\u0000"+ - "\u0000\u0000\u0012\u0164\u0001\u0000\u0000\u0000\u0014\u016b\u0001\u0000"+ - "\u0000\u0000\u0016\u0175\u0001\u0000\u0000\u0000\u0018\u017c\u0001\u0000"+ - "\u0000\u0000\u001a\u0183\u0001\u0000\u0000\u0000\u001c\u0191\u0001\u0000"+ - "\u0000\u0000\u001e\u0198\u0001\u0000\u0000\u0000 \u01a0\u0001\u0000\u0000"+ - "\u0000\"\u01a7\u0001\u0000\u0000\u0000$\u01b3\u0001\u0000\u0000\u0000"+ - "&\u01bc\u0001\u0000\u0000\u0000(\u01c2\u0001\u0000\u0000\u0000*\u01c9"+ - "\u0001\u0000\u0000\u0000,\u01d0\u0001\u0000\u0000\u0000.\u01d8\u0001\u0000"+ - "\u0000\u00000\u01e1\u0001\u0000\u0000\u00002\u01e7\u0001\u0000\u0000\u0000"+ - "4\u01f8\u0001\u0000\u0000\u00006\u0208\u0001\u0000\u0000\u00008\u020e"+ - "\u0001\u0000\u0000\u0000:\u0213\u0001\u0000\u0000\u0000<\u0218\u0001\u0000"+ - "\u0000\u0000>\u021c\u0001\u0000\u0000\u0000@\u0220\u0001\u0000\u0000\u0000"+ - "B\u0224\u0001\u0000\u0000\u0000D\u0228\u0001\u0000\u0000\u0000F\u022a"+ - "\u0001\u0000\u0000\u0000H\u022c\u0001\u0000\u0000\u0000J\u022f\u0001\u0000"+ - "\u0000\u0000L\u0231\u0001\u0000\u0000\u0000N\u023a\u0001\u0000\u0000\u0000"+ - "P\u023c\u0001\u0000\u0000\u0000R\u0241\u0001\u0000\u0000\u0000T\u0243"+ - 
"\u0001\u0000\u0000\u0000V\u0248\u0001\u0000\u0000\u0000X\u0267\u0001\u0000"+ - "\u0000\u0000Z\u026a\u0001\u0000\u0000\u0000\\\u0298\u0001\u0000\u0000"+ - "\u0000^\u029a\u0001\u0000\u0000\u0000`\u029d\u0001\u0000\u0000\u0000b"+ - "\u02a1\u0001\u0000\u0000\u0000d\u02a5\u0001\u0000\u0000\u0000f\u02a7\u0001"+ - "\u0000\u0000\u0000h\u02aa\u0001\u0000\u0000\u0000j\u02ac\u0001\u0000\u0000"+ - "\u0000l\u02b1\u0001\u0000\u0000\u0000n\u02b3\u0001\u0000\u0000\u0000p"+ - "\u02b9\u0001\u0000\u0000\u0000r\u02bf\u0001\u0000\u0000\u0000t\u02c4\u0001"+ - "\u0000\u0000\u0000v\u02c6\u0001\u0000\u0000\u0000x\u02c9\u0001\u0000\u0000"+ - "\u0000z\u02cc\u0001\u0000\u0000\u0000|\u02d1\u0001\u0000\u0000\u0000~"+ - "\u02d5\u0001\u0000\u0000\u0000\u0080\u02da\u0001\u0000\u0000\u0000\u0082"+ - "\u02e0\u0001\u0000\u0000\u0000\u0084\u02e3\u0001\u0000\u0000\u0000\u0086"+ - "\u02e5\u0001\u0000\u0000\u0000\u0088\u02eb\u0001\u0000\u0000\u0000\u008a"+ - "\u02ed\u0001\u0000\u0000\u0000\u008c\u02f2\u0001\u0000\u0000\u0000\u008e"+ - "\u02f5\u0001\u0000\u0000\u0000\u0090\u02f8\u0001\u0000\u0000\u0000\u0092"+ - "\u02fb\u0001\u0000\u0000\u0000\u0094\u02fd\u0001\u0000\u0000\u0000\u0096"+ - "\u0300\u0001\u0000\u0000\u0000\u0098\u0302\u0001\u0000\u0000\u0000\u009a"+ - "\u0305\u0001\u0000\u0000\u0000\u009c\u0307\u0001\u0000\u0000\u0000\u009e"+ - "\u0309\u0001\u0000\u0000\u0000\u00a0\u030b\u0001\u0000\u0000\u0000\u00a2"+ - "\u030d\u0001\u0000\u0000\u0000\u00a4\u030f\u0001\u0000\u0000\u0000\u00a6"+ - "\u0314\u0001\u0000\u0000\u0000\u00a8\u0329\u0001\u0000\u0000\u0000\u00aa"+ - "\u032b\u0001\u0000\u0000\u0000\u00ac\u0333\u0001\u0000\u0000\u0000\u00ae"+ - "\u0335\u0001\u0000\u0000\u0000\u00b0\u0339\u0001\u0000\u0000\u0000\u00b2"+ - "\u033d\u0001\u0000\u0000\u0000\u00b4\u0341\u0001\u0000\u0000\u0000\u00b6"+ - "\u0346\u0001\u0000\u0000\u0000\u00b8\u034a\u0001\u0000\u0000\u0000\u00ba"+ - "\u034e\u0001\u0000\u0000\u0000\u00bc\u0352\u0001\u0000\u0000\u0000\u00be"+ - 
"\u0356\u0001\u0000\u0000\u0000\u00c0\u035a\u0001\u0000\u0000\u0000\u00c2"+ - "\u0362\u0001\u0000\u0000\u0000\u00c4\u036e\u0001\u0000\u0000\u0000\u00c6"+ - "\u0371\u0001\u0000\u0000\u0000\u00c8\u0375\u0001\u0000\u0000\u0000\u00ca"+ - "\u0379\u0001\u0000\u0000\u0000\u00cc\u037d\u0001\u0000\u0000\u0000\u00ce"+ - "\u0381\u0001\u0000\u0000\u0000\u00d0\u0386\u0001\u0000\u0000\u0000\u00d2"+ - "\u038a\u0001\u0000\u0000\u0000\u00d4\u0392\u0001\u0000\u0000\u0000\u00d6"+ - "\u03a7\u0001\u0000\u0000\u0000\u00d8\u03ab\u0001\u0000\u0000\u0000\u00da"+ - "\u03af\u0001\u0000\u0000\u0000\u00dc\u03b3\u0001\u0000\u0000\u0000\u00de"+ - "\u03b7\u0001\u0000\u0000\u0000\u00e0\u03bb\u0001\u0000\u0000\u0000\u00e2"+ - "\u03c0\u0001\u0000\u0000\u0000\u00e4\u03c4\u0001\u0000\u0000\u0000\u00e6"+ - "\u03c8\u0001\u0000\u0000\u0000\u00e8\u03cc\u0001\u0000\u0000\u0000\u00ea"+ - "\u03cf\u0001\u0000\u0000\u0000\u00ec\u03d3\u0001\u0000\u0000\u0000\u00ee"+ - "\u03d7\u0001\u0000\u0000\u0000\u00f0\u03db\u0001\u0000\u0000\u0000\u00f2"+ - "\u03df\u0001\u0000\u0000\u0000\u00f4\u03e4\u0001\u0000\u0000\u0000\u00f6"+ - "\u03e9\u0001\u0000\u0000\u0000\u00f8\u03ee\u0001\u0000\u0000\u0000\u00fa"+ - "\u03f5\u0001\u0000\u0000\u0000\u00fc\u03fe\u0001\u0000\u0000\u0000\u00fe"+ - "\u0405\u0001\u0000\u0000\u0000\u0100\u0409\u0001\u0000\u0000\u0000\u0102"+ - "\u040d\u0001\u0000\u0000\u0000\u0104\u0411\u0001\u0000\u0000\u0000\u0106"+ - "\u0415\u0001\u0000\u0000\u0000\u0108\u0419\u0001\u0000\u0000\u0000\u010a"+ - "\u041f\u0001\u0000\u0000\u0000\u010c\u0423\u0001\u0000\u0000\u0000\u010e"+ - "\u0427\u0001\u0000\u0000\u0000\u0110\u042b\u0001\u0000\u0000\u0000\u0112"+ - "\u042f\u0001\u0000\u0000\u0000\u0114\u0433\u0001\u0000\u0000\u0000\u0116"+ - "\u0437\u0001\u0000\u0000\u0000\u0118\u043b\u0001\u0000\u0000\u0000\u011a"+ - "\u043f\u0001\u0000\u0000\u0000\u011c\u0443\u0001\u0000\u0000\u0000\u011e"+ - "\u0448\u0001\u0000\u0000\u0000\u0120\u044c\u0001\u0000\u0000\u0000\u0122"+ - 
"\u0450\u0001\u0000\u0000\u0000\u0124\u0454\u0001\u0000\u0000\u0000\u0126"+ - "\u0458\u0001\u0000\u0000\u0000\u0128\u045c\u0001\u0000\u0000\u0000\u012a"+ - "\u0460\u0001\u0000\u0000\u0000\u012c\u0465\u0001\u0000\u0000\u0000\u012e"+ - "\u046a\u0001\u0000\u0000\u0000\u0130\u046e\u0001\u0000\u0000\u0000\u0132"+ - "\u0472\u0001\u0000\u0000\u0000\u0134\u0476\u0001\u0000\u0000\u0000\u0136"+ - "\u047b\u0001\u0000\u0000\u0000\u0138\u0485\u0001\u0000\u0000\u0000\u013a"+ - "\u0489\u0001\u0000\u0000\u0000\u013c\u048d\u0001\u0000\u0000\u0000\u013e"+ - "\u0491\u0001\u0000\u0000\u0000\u0140\u0496\u0001\u0000\u0000\u0000\u0142"+ - "\u049d\u0001\u0000\u0000\u0000\u0144\u04a1\u0001\u0000\u0000\u0000\u0146"+ - "\u04a5\u0001\u0000\u0000\u0000\u0148\u04a9\u0001\u0000\u0000\u0000\u014a"+ - "\u014b\u0005d\u0000\u0000\u014b\u014c\u0005i\u0000\u0000\u014c\u014d\u0005"+ - "s\u0000\u0000\u014d\u014e\u0005s\u0000\u0000\u014e\u014f\u0005e\u0000"+ - "\u0000\u014f\u0150\u0005c\u0000\u0000\u0150\u0151\u0005t\u0000\u0000\u0151"+ - "\u0152\u0001\u0000\u0000\u0000\u0152\u0153\u0006\u0000\u0000\u0000\u0153"+ - "\r\u0001\u0000\u0000\u0000\u0154\u0155\u0005d\u0000\u0000\u0155\u0156"+ - "\u0005r\u0000\u0000\u0156\u0157\u0005o\u0000\u0000\u0157\u0158\u0005p"+ - "\u0000\u0000\u0158\u0159\u0001\u0000\u0000\u0000\u0159\u015a\u0006\u0001"+ - "\u0001\u0000\u015a\u000f\u0001\u0000\u0000\u0000\u015b\u015c\u0005e\u0000"+ - "\u0000\u015c\u015d\u0005n\u0000\u0000\u015d\u015e\u0005r\u0000\u0000\u015e"+ - "\u015f\u0005i\u0000\u0000\u015f\u0160\u0005c\u0000\u0000\u0160\u0161\u0005"+ - "h\u0000\u0000\u0161\u0162\u0001\u0000\u0000\u0000\u0162\u0163\u0006\u0002"+ - "\u0002\u0000\u0163\u0011\u0001\u0000\u0000\u0000\u0164\u0165\u0005e\u0000"+ - "\u0000\u0165\u0166\u0005v\u0000\u0000\u0166\u0167\u0005a\u0000\u0000\u0167"+ - "\u0168\u0005l\u0000\u0000\u0168\u0169\u0001\u0000\u0000\u0000\u0169\u016a"+ - "\u0006\u0003\u0000\u0000\u016a\u0013\u0001\u0000\u0000\u0000\u016b\u016c"+ - 
"\u0005e\u0000\u0000\u016c\u016d\u0005x\u0000\u0000\u016d\u016e\u0005p"+ - "\u0000\u0000\u016e\u016f\u0005l\u0000\u0000\u016f\u0170\u0005a\u0000\u0000"+ - "\u0170\u0171\u0005i\u0000\u0000\u0171\u0172\u0005n\u0000\u0000\u0172\u0173"+ - "\u0001\u0000\u0000\u0000\u0173\u0174\u0006\u0004\u0003\u0000\u0174\u0015"+ - "\u0001\u0000\u0000\u0000\u0175\u0176\u0005f\u0000\u0000\u0176\u0177\u0005"+ - "r\u0000\u0000\u0177\u0178\u0005o\u0000\u0000\u0178\u0179\u0005m\u0000"+ - "\u0000\u0179\u017a\u0001\u0000\u0000\u0000\u017a\u017b\u0006\u0005\u0004"+ - "\u0000\u017b\u0017\u0001\u0000\u0000\u0000\u017c\u017d\u0005g\u0000\u0000"+ - "\u017d\u017e\u0005r\u0000\u0000\u017e\u017f\u0005o\u0000\u0000\u017f\u0180"+ - "\u0005k\u0000\u0000\u0180\u0181\u0001\u0000\u0000\u0000\u0181\u0182\u0006"+ - "\u0006\u0000\u0000\u0182\u0019\u0001\u0000\u0000\u0000\u0183\u0184\u0005"+ - "i\u0000\u0000\u0184\u0185\u0005n\u0000\u0000\u0185\u0186\u0005l\u0000"+ - "\u0000\u0186\u0187\u0005i\u0000\u0000\u0187\u0188\u0005n\u0000\u0000\u0188"+ - "\u0189\u0005e\u0000\u0000\u0189\u018a\u0005s\u0000\u0000\u018a\u018b\u0005"+ - "t\u0000\u0000\u018b\u018c\u0005a\u0000\u0000\u018c\u018d\u0005t\u0000"+ - "\u0000\u018d\u018e\u0005s\u0000\u0000\u018e\u018f\u0001\u0000\u0000\u0000"+ - "\u018f\u0190\u0006\u0007\u0000\u0000\u0190\u001b\u0001\u0000\u0000\u0000"+ - "\u0191\u0192\u0005k\u0000\u0000\u0192\u0193\u0005e\u0000\u0000\u0193\u0194"+ - "\u0005e\u0000\u0000\u0194\u0195\u0005p\u0000\u0000\u0195\u0196\u0001\u0000"+ - "\u0000\u0000\u0196\u0197\u0006\b\u0001\u0000\u0197\u001d\u0001\u0000\u0000"+ - "\u0000\u0198\u0199\u0005l\u0000\u0000\u0199\u019a\u0005i\u0000\u0000\u019a"+ - "\u019b\u0005m\u0000\u0000\u019b\u019c\u0005i\u0000\u0000\u019c\u019d\u0005"+ - "t\u0000\u0000\u019d\u019e\u0001\u0000\u0000\u0000\u019e\u019f\u0006\t"+ - "\u0000\u0000\u019f\u001f\u0001\u0000\u0000\u0000\u01a0\u01a1\u0005m\u0000"+ - "\u0000\u01a1\u01a2\u0005e\u0000\u0000\u01a2\u01a3\u0005t\u0000\u0000\u01a3"+ - 
"\u01a4\u0005a\u0000\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5\u01a6"+ - "\u0006\n\u0005\u0000\u01a6!\u0001\u0000\u0000\u0000\u01a7\u01a8\u0005"+ - "m\u0000\u0000\u01a8\u01a9\u0005v\u0000\u0000\u01a9\u01aa\u0005_\u0000"+ - "\u0000\u01aa\u01ab\u0005e\u0000\u0000\u01ab\u01ac\u0005x\u0000\u0000\u01ac"+ - "\u01ad\u0005p\u0000\u0000\u01ad\u01ae\u0005a\u0000\u0000\u01ae\u01af\u0005"+ - "n\u0000\u0000\u01af\u01b0\u0005d\u0000\u0000\u01b0\u01b1\u0001\u0000\u0000"+ - "\u0000\u01b1\u01b2\u0006\u000b\u0006\u0000\u01b2#\u0001\u0000\u0000\u0000"+ - "\u01b3\u01b4\u0005r\u0000\u0000\u01b4\u01b5\u0005e\u0000\u0000\u01b5\u01b6"+ - "\u0005n\u0000\u0000\u01b6\u01b7\u0005a\u0000\u0000\u01b7\u01b8\u0005m"+ - "\u0000\u0000\u01b8\u01b9\u0005e\u0000\u0000\u01b9\u01ba\u0001\u0000\u0000"+ - "\u0000\u01ba\u01bb\u0006\f\u0007\u0000\u01bb%\u0001\u0000\u0000\u0000"+ - "\u01bc\u01bd\u0005r\u0000\u0000\u01bd\u01be\u0005o\u0000\u0000\u01be\u01bf"+ - "\u0005w\u0000\u0000\u01bf\u01c0\u0001\u0000\u0000\u0000\u01c0\u01c1\u0006"+ - "\r\u0000\u0000\u01c1\'\u0001\u0000\u0000\u0000\u01c2\u01c3\u0005s\u0000"+ - "\u0000\u01c3\u01c4\u0005h\u0000\u0000\u01c4\u01c5\u0005o\u0000\u0000\u01c5"+ - "\u01c6\u0005w\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7\u01c8"+ - "\u0006\u000e\b\u0000\u01c8)\u0001\u0000\u0000\u0000\u01c9\u01ca\u0005"+ - "s\u0000\u0000\u01ca\u01cb\u0005o\u0000\u0000\u01cb\u01cc\u0005r\u0000"+ - "\u0000\u01cc\u01cd\u0005t\u0000\u0000\u01cd\u01ce\u0001\u0000\u0000\u0000"+ - "\u01ce\u01cf\u0006\u000f\u0000\u0000\u01cf+\u0001\u0000\u0000\u0000\u01d0"+ - "\u01d1\u0005s\u0000\u0000\u01d1\u01d2\u0005t\u0000\u0000\u01d2\u01d3\u0005"+ - "a\u0000\u0000\u01d3\u01d4\u0005t\u0000\u0000\u01d4\u01d5\u0005s\u0000"+ - "\u0000\u01d5\u01d6\u0001\u0000\u0000\u0000\u01d6\u01d7\u0006\u0010\u0000"+ - "\u0000\u01d7-\u0001\u0000\u0000\u0000\u01d8\u01d9\u0005w\u0000\u0000\u01d9"+ - "\u01da\u0005h\u0000\u0000\u01da\u01db\u0005e\u0000\u0000\u01db\u01dc\u0005"+ - 
"r\u0000\u0000\u01dc\u01dd\u0005e\u0000\u0000\u01dd\u01de\u0001\u0000\u0000"+ - "\u0000\u01de\u01df\u0006\u0011\u0000\u0000\u01df/\u0001\u0000\u0000\u0000"+ - "\u01e0\u01e2\b\u0000\u0000\u0000\u01e1\u01e0\u0001\u0000\u0000\u0000\u01e2"+ - "\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e1\u0001\u0000\u0000\u0000\u01e3"+ - "\u01e4\u0001\u0000\u0000\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5"+ - "\u01e6\u0006\u0012\u0000\u0000\u01e61\u0001\u0000\u0000\u0000\u01e7\u01e8"+ - "\u0005/\u0000\u0000\u01e8\u01e9\u0005/\u0000\u0000\u01e9\u01ed\u0001\u0000"+ - "\u0000\u0000\u01ea\u01ec\b\u0001\u0000\u0000\u01eb\u01ea\u0001\u0000\u0000"+ - "\u0000\u01ec\u01ef\u0001\u0000\u0000\u0000\u01ed\u01eb\u0001\u0000\u0000"+ - "\u0000\u01ed\u01ee\u0001\u0000\u0000\u0000\u01ee\u01f1\u0001\u0000\u0000"+ - "\u0000\u01ef\u01ed\u0001\u0000\u0000\u0000\u01f0\u01f2\u0005\r\u0000\u0000"+ - "\u01f1\u01f0\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000"+ - "\u01f2\u01f4\u0001\u0000\u0000\u0000\u01f3\u01f5\u0005\n\u0000\u0000\u01f4"+ - "\u01f3\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000\u01f5"+ - "\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0006\u0013\t\u0000\u01f73"+ - "\u0001\u0000\u0000\u0000\u01f8\u01f9\u0005/\u0000\u0000\u01f9\u01fa\u0005"+ - "*\u0000\u0000\u01fa\u01ff\u0001\u0000\u0000\u0000\u01fb\u01fe\u00034\u0014"+ - "\u0000\u01fc\u01fe\t\u0000\u0000\u0000\u01fd\u01fb\u0001\u0000\u0000\u0000"+ - "\u01fd\u01fc\u0001\u0000\u0000\u0000\u01fe\u0201\u0001\u0000\u0000\u0000"+ - "\u01ff\u0200\u0001\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000\u0000\u0000"+ - "\u0200\u0202\u0001\u0000\u0000\u0000\u0201\u01ff\u0001\u0000\u0000\u0000"+ - "\u0202\u0203\u0005*\u0000\u0000\u0203\u0204\u0005/\u0000\u0000\u0204\u0205"+ - "\u0001\u0000\u0000\u0000\u0205\u0206\u0006\u0014\t\u0000\u02065\u0001"+ - "\u0000\u0000\u0000\u0207\u0209\u0007\u0002\u0000\u0000\u0208\u0207\u0001"+ - "\u0000\u0000\u0000\u0209\u020a\u0001\u0000\u0000\u0000\u020a\u0208\u0001"+ - 
"\u0000\u0000\u0000\u020a\u020b\u0001\u0000\u0000\u0000\u020b\u020c\u0001"+ - "\u0000\u0000\u0000\u020c\u020d\u0006\u0015\t\u0000\u020d7\u0001\u0000"+ - "\u0000\u0000\u020e\u020f\u0003\u00a4L\u0000\u020f\u0210\u0001\u0000\u0000"+ - "\u0000\u0210\u0211\u0006\u0016\n\u0000\u0211\u0212\u0006\u0016\u000b\u0000"+ - "\u02129\u0001\u0000\u0000\u0000\u0213\u0214\u0003B\u001b\u0000\u0214\u0215"+ - "\u0001\u0000\u0000\u0000\u0215\u0216\u0006\u0017\f\u0000\u0216\u0217\u0006"+ - "\u0017\r\u0000\u0217;\u0001\u0000\u0000\u0000\u0218\u0219\u00036\u0015"+ - "\u0000\u0219\u021a\u0001\u0000\u0000\u0000\u021a\u021b\u0006\u0018\t\u0000"+ - "\u021b=\u0001\u0000\u0000\u0000\u021c\u021d\u00032\u0013\u0000\u021d\u021e"+ - "\u0001\u0000\u0000\u0000\u021e\u021f\u0006\u0019\t\u0000\u021f?\u0001"+ - "\u0000\u0000\u0000\u0220\u0221\u00034\u0014\u0000\u0221\u0222\u0001\u0000"+ - "\u0000\u0000\u0222\u0223\u0006\u001a\t\u0000\u0223A\u0001\u0000\u0000"+ - "\u0000\u0224\u0225\u0005|\u0000\u0000\u0225\u0226\u0001\u0000\u0000\u0000"+ - "\u0226\u0227\u0006\u001b\r\u0000\u0227C\u0001\u0000\u0000\u0000\u0228"+ - "\u0229\u0007\u0003\u0000\u0000\u0229E\u0001\u0000\u0000\u0000\u022a\u022b"+ - "\u0007\u0004\u0000\u0000\u022bG\u0001\u0000\u0000\u0000\u022c\u022d\u0005"+ - "\\\u0000\u0000\u022d\u022e\u0007\u0005\u0000\u0000\u022eI\u0001\u0000"+ - "\u0000\u0000\u022f\u0230\b\u0006\u0000\u0000\u0230K\u0001\u0000\u0000"+ - "\u0000\u0231\u0233\u0007\u0007\u0000\u0000\u0232\u0234\u0007\b\u0000\u0000"+ - "\u0233\u0232\u0001\u0000\u0000\u0000\u0233\u0234\u0001\u0000\u0000\u0000"+ - "\u0234\u0236\u0001\u0000\u0000\u0000\u0235\u0237\u0003D\u001c\u0000\u0236"+ - "\u0235\u0001\u0000\u0000\u0000\u0237\u0238\u0001\u0000\u0000\u0000\u0238"+ - "\u0236\u0001\u0000\u0000\u0000\u0238\u0239\u0001\u0000\u0000\u0000\u0239"+ - "M\u0001\u0000\u0000\u0000\u023a\u023b\u0005@\u0000\u0000\u023bO\u0001"+ - "\u0000\u0000\u0000\u023c\u023d\u0005`\u0000\u0000\u023dQ\u0001\u0000\u0000"+ - 
"\u0000\u023e\u0242\b\t\u0000\u0000\u023f\u0240\u0005`\u0000\u0000\u0240"+ - "\u0242\u0005`\u0000\u0000\u0241\u023e\u0001\u0000\u0000\u0000\u0241\u023f"+ - "\u0001\u0000\u0000\u0000\u0242S\u0001\u0000\u0000\u0000\u0243\u0244\u0005"+ - "_\u0000\u0000\u0244U\u0001\u0000\u0000\u0000\u0245\u0249\u0003F\u001d"+ - "\u0000\u0246\u0249\u0003D\u001c\u0000\u0247\u0249\u0003T$\u0000\u0248"+ - "\u0245\u0001\u0000\u0000\u0000\u0248\u0246\u0001\u0000\u0000\u0000\u0248"+ - "\u0247\u0001\u0000\u0000\u0000\u0249W\u0001\u0000\u0000\u0000\u024a\u024f"+ - "\u0005\"\u0000\u0000\u024b\u024e\u0003H\u001e\u0000\u024c\u024e\u0003"+ - "J\u001f\u0000\u024d\u024b\u0001\u0000\u0000\u0000\u024d\u024c\u0001\u0000"+ - "\u0000\u0000\u024e\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001\u0000"+ - "\u0000\u0000\u024f\u0250\u0001\u0000\u0000\u0000\u0250\u0252\u0001\u0000"+ - "\u0000\u0000\u0251\u024f\u0001\u0000\u0000\u0000\u0252\u0268\u0005\"\u0000"+ - "\u0000\u0253\u0254\u0005\"\u0000\u0000\u0254\u0255\u0005\"\u0000\u0000"+ - "\u0255\u0256\u0005\"\u0000\u0000\u0256\u025a\u0001\u0000\u0000\u0000\u0257"+ - "\u0259\b\u0001\u0000\u0000\u0258\u0257\u0001\u0000\u0000\u0000\u0259\u025c"+ - "\u0001\u0000\u0000\u0000\u025a\u025b\u0001\u0000\u0000\u0000\u025a\u0258"+ - "\u0001\u0000\u0000\u0000\u025b\u025d\u0001\u0000\u0000\u0000\u025c\u025a"+ - "\u0001\u0000\u0000\u0000\u025d\u025e\u0005\"\u0000\u0000\u025e\u025f\u0005"+ - "\"\u0000\u0000\u025f\u0260\u0005\"\u0000\u0000\u0260\u0262\u0001\u0000"+ - "\u0000\u0000\u0261\u0263\u0005\"\u0000\u0000\u0262\u0261\u0001\u0000\u0000"+ - "\u0000\u0262\u0263\u0001\u0000\u0000\u0000\u0263\u0265\u0001\u0000\u0000"+ - "\u0000\u0264\u0266\u0005\"\u0000\u0000\u0265\u0264\u0001\u0000\u0000\u0000"+ - "\u0265\u0266\u0001\u0000\u0000\u0000\u0266\u0268\u0001\u0000\u0000\u0000"+ - "\u0267\u024a\u0001\u0000\u0000\u0000\u0267\u0253\u0001\u0000\u0000\u0000"+ - "\u0268Y\u0001\u0000\u0000\u0000\u0269\u026b\u0003D\u001c\u0000\u026a\u0269"+ - 
"\u0001\u0000\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026a"+ - "\u0001\u0000\u0000\u0000\u026c\u026d\u0001\u0000\u0000\u0000\u026d[\u0001"+ - "\u0000\u0000\u0000\u026e\u0270\u0003D\u001c\u0000\u026f\u026e\u0001\u0000"+ - "\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000\u0271\u026f\u0001\u0000"+ - "\u0000\u0000\u0271\u0272\u0001\u0000\u0000\u0000\u0272\u0273\u0001\u0000"+ - "\u0000\u0000\u0273\u0277\u0003l0\u0000\u0274\u0276\u0003D\u001c\u0000"+ - "\u0275\u0274\u0001\u0000\u0000\u0000\u0276\u0279\u0001\u0000\u0000\u0000"+ - "\u0277\u0275\u0001\u0000\u0000\u0000\u0277\u0278\u0001\u0000\u0000\u0000"+ - "\u0278\u0299\u0001\u0000\u0000\u0000\u0279\u0277\u0001\u0000\u0000\u0000"+ - "\u027a\u027c\u0003l0\u0000\u027b\u027d\u0003D\u001c\u0000\u027c\u027b"+ - "\u0001\u0000\u0000\u0000\u027d\u027e\u0001\u0000\u0000\u0000\u027e\u027c"+ - "\u0001\u0000\u0000\u0000\u027e\u027f\u0001\u0000\u0000\u0000\u027f\u0299"+ - "\u0001\u0000\u0000\u0000\u0280\u0282\u0003D\u001c\u0000\u0281\u0280\u0001"+ - "\u0000\u0000\u0000\u0282\u0283\u0001\u0000\u0000\u0000\u0283\u0281\u0001"+ - "\u0000\u0000\u0000\u0283\u0284\u0001\u0000\u0000\u0000\u0284\u028c\u0001"+ - "\u0000\u0000\u0000\u0285\u0289\u0003l0\u0000\u0286\u0288\u0003D\u001c"+ - "\u0000\u0287\u0286\u0001\u0000\u0000\u0000\u0288\u028b\u0001\u0000\u0000"+ - "\u0000\u0289\u0287\u0001\u0000\u0000\u0000\u0289\u028a\u0001\u0000\u0000"+ - "\u0000\u028a\u028d\u0001\u0000\u0000\u0000\u028b\u0289\u0001\u0000\u0000"+ - "\u0000\u028c\u0285\u0001\u0000\u0000\u0000\u028c\u028d\u0001\u0000\u0000"+ - "\u0000\u028d\u028e\u0001\u0000\u0000\u0000\u028e\u028f\u0003L \u0000\u028f"+ - "\u0299\u0001\u0000\u0000\u0000\u0290\u0292\u0003l0\u0000\u0291\u0293\u0003"+ - "D\u001c\u0000\u0292\u0291\u0001\u0000\u0000\u0000\u0293\u0294\u0001\u0000"+ - "\u0000\u0000\u0294\u0292\u0001\u0000\u0000\u0000\u0294\u0295\u0001\u0000"+ - "\u0000\u0000\u0295\u0296\u0001\u0000\u0000\u0000\u0296\u0297\u0003L \u0000"+ - 
"\u0297\u0299\u0001\u0000\u0000\u0000\u0298\u026f\u0001\u0000\u0000\u0000"+ - "\u0298\u027a\u0001\u0000\u0000\u0000\u0298\u0281\u0001\u0000\u0000\u0000"+ - "\u0298\u0290\u0001\u0000\u0000\u0000\u0299]\u0001\u0000\u0000\u0000\u029a"+ - "\u029b\u0005b\u0000\u0000\u029b\u029c\u0005y\u0000\u0000\u029c_\u0001"+ - "\u0000\u0000\u0000\u029d\u029e\u0005a\u0000\u0000\u029e\u029f\u0005n\u0000"+ - "\u0000\u029f\u02a0\u0005d\u0000\u0000\u02a0a\u0001\u0000\u0000\u0000\u02a1"+ - "\u02a2\u0005a\u0000\u0000\u02a2\u02a3\u0005s\u0000\u0000\u02a3\u02a4\u0005"+ - "c\u0000\u0000\u02a4c\u0001\u0000\u0000\u0000\u02a5\u02a6\u0005=\u0000"+ - "\u0000\u02a6e\u0001\u0000\u0000\u0000\u02a7\u02a8\u0005:\u0000\u0000\u02a8"+ - "\u02a9\u0005:\u0000\u0000\u02a9g\u0001\u0000\u0000\u0000\u02aa\u02ab\u0005"+ - ",\u0000\u0000\u02abi\u0001\u0000\u0000\u0000\u02ac\u02ad\u0005d\u0000"+ - "\u0000\u02ad\u02ae\u0005e\u0000\u0000\u02ae\u02af\u0005s\u0000\u0000\u02af"+ - "\u02b0\u0005c\u0000\u0000\u02b0k\u0001\u0000\u0000\u0000\u02b1\u02b2\u0005"+ - ".\u0000\u0000\u02b2m\u0001\u0000\u0000\u0000\u02b3\u02b4\u0005f\u0000"+ - "\u0000\u02b4\u02b5\u0005a\u0000\u0000\u02b5\u02b6\u0005l\u0000\u0000\u02b6"+ - "\u02b7\u0005s\u0000\u0000\u02b7\u02b8\u0005e\u0000\u0000\u02b8o\u0001"+ - "\u0000\u0000\u0000\u02b9\u02ba\u0005f\u0000\u0000\u02ba\u02bb\u0005i\u0000"+ - "\u0000\u02bb\u02bc\u0005r\u0000\u0000\u02bc\u02bd\u0005s\u0000\u0000\u02bd"+ - "\u02be\u0005t\u0000\u0000\u02beq\u0001\u0000\u0000\u0000\u02bf\u02c0\u0005"+ - "l\u0000\u0000\u02c0\u02c1\u0005a\u0000\u0000\u02c1\u02c2\u0005s\u0000"+ - "\u0000\u02c2\u02c3\u0005t\u0000\u0000\u02c3s\u0001\u0000\u0000\u0000\u02c4"+ - "\u02c5\u0005(\u0000\u0000\u02c5u\u0001\u0000\u0000\u0000\u02c6\u02c7\u0005"+ - "i\u0000\u0000\u02c7\u02c8\u0005n\u0000\u0000\u02c8w\u0001\u0000\u0000"+ - "\u0000\u02c9\u02ca\u0005i\u0000\u0000\u02ca\u02cb\u0005s\u0000\u0000\u02cb"+ - "y\u0001\u0000\u0000\u0000\u02cc\u02cd\u0005l\u0000\u0000\u02cd\u02ce\u0005"+ - 
"i\u0000\u0000\u02ce\u02cf\u0005k\u0000\u0000\u02cf\u02d0\u0005e\u0000"+ - "\u0000\u02d0{\u0001\u0000\u0000\u0000\u02d1\u02d2\u0005n\u0000\u0000\u02d2"+ - "\u02d3\u0005o\u0000\u0000\u02d3\u02d4\u0005t\u0000\u0000\u02d4}\u0001"+ - "\u0000\u0000\u0000\u02d5\u02d6\u0005n\u0000\u0000\u02d6\u02d7\u0005u\u0000"+ - "\u0000\u02d7\u02d8\u0005l\u0000\u0000\u02d8\u02d9\u0005l\u0000\u0000\u02d9"+ - "\u007f\u0001\u0000\u0000\u0000\u02da\u02db\u0005n\u0000\u0000\u02db\u02dc"+ - "\u0005u\u0000\u0000\u02dc\u02dd\u0005l\u0000\u0000\u02dd\u02de\u0005l"+ - "\u0000\u0000\u02de\u02df\u0005s\u0000\u0000\u02df\u0081\u0001\u0000\u0000"+ - "\u0000\u02e0\u02e1\u0005o\u0000\u0000\u02e1\u02e2\u0005r\u0000\u0000\u02e2"+ - "\u0083\u0001\u0000\u0000\u0000\u02e3\u02e4\u0005?\u0000\u0000\u02e4\u0085"+ - "\u0001\u0000\u0000\u0000\u02e5\u02e6\u0005r\u0000\u0000\u02e6\u02e7\u0005"+ - "l\u0000\u0000\u02e7\u02e8\u0005i\u0000\u0000\u02e8\u02e9\u0005k\u0000"+ - "\u0000\u02e9\u02ea\u0005e\u0000\u0000\u02ea\u0087\u0001\u0000\u0000\u0000"+ - "\u02eb\u02ec\u0005)\u0000\u0000\u02ec\u0089\u0001\u0000\u0000\u0000\u02ed"+ - "\u02ee\u0005t\u0000\u0000\u02ee\u02ef\u0005r\u0000\u0000\u02ef\u02f0\u0005"+ - "u\u0000\u0000\u02f0\u02f1\u0005e\u0000\u0000\u02f1\u008b\u0001\u0000\u0000"+ - "\u0000\u02f2\u02f3\u0005=\u0000\u0000\u02f3\u02f4\u0005=\u0000\u0000\u02f4"+ - "\u008d\u0001\u0000\u0000\u0000\u02f5\u02f6\u0005=\u0000\u0000\u02f6\u02f7"+ - "\u0005~\u0000\u0000\u02f7\u008f\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005"+ - "!\u0000\u0000\u02f9\u02fa\u0005=\u0000\u0000\u02fa\u0091\u0001\u0000\u0000"+ - "\u0000\u02fb\u02fc\u0005<\u0000\u0000\u02fc\u0093\u0001\u0000\u0000\u0000"+ - "\u02fd\u02fe\u0005<\u0000\u0000\u02fe\u02ff\u0005=\u0000\u0000\u02ff\u0095"+ - "\u0001\u0000\u0000\u0000\u0300\u0301\u0005>\u0000\u0000\u0301\u0097\u0001"+ - "\u0000\u0000\u0000\u0302\u0303\u0005>\u0000\u0000\u0303\u0304\u0005=\u0000"+ - "\u0000\u0304\u0099\u0001\u0000\u0000\u0000\u0305\u0306\u0005+\u0000\u0000"+ - 
"\u0306\u009b\u0001\u0000\u0000\u0000\u0307\u0308\u0005-\u0000\u0000\u0308"+ - "\u009d\u0001\u0000\u0000\u0000\u0309\u030a\u0005*\u0000\u0000\u030a\u009f"+ - "\u0001\u0000\u0000\u0000\u030b\u030c\u0005/\u0000\u0000\u030c\u00a1\u0001"+ - "\u0000\u0000\u0000\u030d\u030e\u0005%\u0000\u0000\u030e\u00a3\u0001\u0000"+ - "\u0000\u0000\u030f\u0310\u0005[\u0000\u0000\u0310\u0311\u0001\u0000\u0000"+ - "\u0000\u0311\u0312\u0006L\u0000\u0000\u0312\u0313\u0006L\u0000\u0000\u0313"+ - "\u00a5\u0001\u0000\u0000\u0000\u0314\u0315\u0005]\u0000\u0000\u0315\u0316"+ - "\u0001\u0000\u0000\u0000\u0316\u0317\u0006M\r\u0000\u0317\u0318\u0006"+ - "M\r\u0000\u0318\u00a7\u0001\u0000\u0000\u0000\u0319\u031d\u0003F\u001d"+ - "\u0000\u031a\u031c\u0003V%\u0000\u031b\u031a\u0001\u0000\u0000\u0000\u031c"+ - "\u031f\u0001\u0000\u0000\u0000\u031d\u031b\u0001\u0000\u0000\u0000\u031d"+ - "\u031e\u0001\u0000\u0000\u0000\u031e\u032a\u0001\u0000\u0000\u0000\u031f"+ - "\u031d\u0001\u0000\u0000\u0000\u0320\u0323\u0003T$\u0000\u0321\u0323\u0003"+ - "N!\u0000\u0322\u0320\u0001\u0000\u0000\u0000\u0322\u0321\u0001\u0000\u0000"+ - "\u0000\u0323\u0325\u0001\u0000\u0000\u0000\u0324\u0326\u0003V%\u0000\u0325"+ - "\u0324\u0001\u0000\u0000\u0000\u0326\u0327\u0001\u0000\u0000\u0000\u0327"+ - "\u0325\u0001\u0000\u0000\u0000\u0327\u0328\u0001\u0000\u0000\u0000\u0328"+ - "\u032a\u0001\u0000\u0000\u0000\u0329\u0319\u0001\u0000\u0000\u0000\u0329"+ - "\u0322\u0001\u0000\u0000\u0000\u032a\u00a9\u0001\u0000\u0000\u0000\u032b"+ - "\u032d\u0003P\"\u0000\u032c\u032e\u0003R#\u0000\u032d\u032c\u0001\u0000"+ - "\u0000\u0000\u032e\u032f\u0001\u0000\u0000\u0000\u032f\u032d\u0001\u0000"+ - "\u0000\u0000\u032f\u0330\u0001\u0000\u0000\u0000\u0330\u0331\u0001\u0000"+ - "\u0000\u0000\u0331\u0332\u0003P\"\u0000\u0332\u00ab\u0001\u0000\u0000"+ - "\u0000\u0333\u0334\u0003\u00aaO\u0000\u0334\u00ad\u0001\u0000\u0000\u0000"+ - "\u0335\u0336\u00032\u0013\u0000\u0336\u0337\u0001\u0000\u0000\u0000\u0337"+ - 
"\u0338\u0006Q\t\u0000\u0338\u00af\u0001\u0000\u0000\u0000\u0339\u033a"+ - "\u00034\u0014\u0000\u033a\u033b\u0001\u0000\u0000\u0000\u033b\u033c\u0006"+ - "R\t\u0000\u033c\u00b1\u0001\u0000\u0000\u0000\u033d\u033e\u00036\u0015"+ - "\u0000\u033e\u033f\u0001\u0000\u0000\u0000\u033f\u0340\u0006S\t\u0000"+ - "\u0340\u00b3\u0001\u0000\u0000\u0000\u0341\u0342\u0003B\u001b\u0000\u0342"+ - "\u0343\u0001\u0000\u0000\u0000\u0343\u0344\u0006T\f\u0000\u0344\u0345"+ - "\u0006T\r\u0000\u0345\u00b5\u0001\u0000\u0000\u0000\u0346\u0347\u0003"+ - "\u00a4L\u0000\u0347\u0348\u0001\u0000\u0000\u0000\u0348\u0349\u0006U\n"+ - "\u0000\u0349\u00b7\u0001\u0000\u0000\u0000\u034a\u034b\u0003\u00a6M\u0000"+ - "\u034b\u034c\u0001\u0000\u0000\u0000\u034c\u034d\u0006V\u000e\u0000\u034d"+ - "\u00b9\u0001\u0000\u0000\u0000\u034e\u034f\u0003h.\u0000\u034f\u0350\u0001"+ - "\u0000\u0000\u0000\u0350\u0351\u0006W\u000f\u0000\u0351\u00bb\u0001\u0000"+ - "\u0000\u0000\u0352\u0353\u0003d,\u0000\u0353\u0354\u0001\u0000\u0000\u0000"+ - "\u0354\u0355\u0006X\u0010\u0000\u0355\u00bd\u0001\u0000\u0000\u0000\u0356"+ - "\u0357\u0003X&\u0000\u0357\u0358\u0001\u0000\u0000\u0000\u0358\u0359\u0006"+ - "Y\u0011\u0000\u0359\u00bf\u0001\u0000\u0000\u0000\u035a\u035b\u0005o\u0000"+ - "\u0000\u035b\u035c\u0005p\u0000\u0000\u035c\u035d\u0005t\u0000\u0000\u035d"+ - "\u035e\u0005i\u0000\u0000\u035e\u035f\u0005o\u0000\u0000\u035f\u0360\u0005"+ - "n\u0000\u0000\u0360\u0361\u0005s\u0000\u0000\u0361\u00c1\u0001\u0000\u0000"+ - "\u0000\u0362\u0363\u0005m\u0000\u0000\u0363\u0364\u0005e\u0000\u0000\u0364"+ - "\u0365\u0005t\u0000\u0000\u0365\u0366\u0005a\u0000\u0000\u0366\u0367\u0005"+ - "d\u0000\u0000\u0367\u0368\u0005a\u0000\u0000\u0368\u0369\u0005t\u0000"+ - "\u0000\u0369\u036a\u0005a\u0000\u0000\u036a\u00c3\u0001\u0000\u0000\u0000"+ - "\u036b\u036f\b\n\u0000\u0000\u036c\u036d\u0005/\u0000\u0000\u036d\u036f"+ - "\b\u000b\u0000\u0000\u036e\u036b\u0001\u0000\u0000\u0000\u036e\u036c\u0001"+ - 
"\u0000\u0000\u0000\u036f\u00c5\u0001\u0000\u0000\u0000\u0370\u0372\u0003"+ - "\u00c4\\\u0000\u0371\u0370\u0001\u0000\u0000\u0000\u0372\u0373\u0001\u0000"+ - "\u0000\u0000\u0373\u0371\u0001\u0000\u0000\u0000\u0373\u0374\u0001\u0000"+ - "\u0000\u0000\u0374\u00c7\u0001\u0000\u0000\u0000\u0375\u0376\u00032\u0013"+ - "\u0000\u0376\u0377\u0001\u0000\u0000\u0000\u0377\u0378\u0006^\t\u0000"+ - "\u0378\u00c9\u0001\u0000\u0000\u0000\u0379\u037a\u00034\u0014\u0000\u037a"+ - "\u037b\u0001\u0000\u0000\u0000\u037b\u037c\u0006_\t\u0000\u037c\u00cb"+ - "\u0001\u0000\u0000\u0000\u037d\u037e\u00036\u0015\u0000\u037e\u037f\u0001"+ - "\u0000\u0000\u0000\u037f\u0380\u0006`\t\u0000\u0380\u00cd\u0001\u0000"+ - "\u0000\u0000\u0381\u0382\u0003B\u001b\u0000\u0382\u0383\u0001\u0000\u0000"+ - "\u0000\u0383\u0384\u0006a\f\u0000\u0384\u0385\u0006a\r\u0000\u0385\u00cf"+ - "\u0001\u0000\u0000\u0000\u0386\u0387\u0003l0\u0000\u0387\u0388\u0001\u0000"+ - "\u0000\u0000\u0388\u0389\u0006b\u0012\u0000\u0389\u00d1\u0001\u0000\u0000"+ - "\u0000\u038a\u038b\u0003h.\u0000\u038b\u038c\u0001\u0000\u0000\u0000\u038c"+ - "\u038d\u0006c\u000f\u0000\u038d\u00d3\u0001\u0000\u0000\u0000\u038e\u0393"+ - "\u0003F\u001d\u0000\u038f\u0393\u0003D\u001c\u0000\u0390\u0393\u0003T"+ - "$\u0000\u0391\u0393\u0003\u009eI\u0000\u0392\u038e\u0001\u0000\u0000\u0000"+ - "\u0392\u038f\u0001\u0000\u0000\u0000\u0392\u0390\u0001\u0000\u0000\u0000"+ - "\u0392\u0391\u0001\u0000\u0000\u0000\u0393\u00d5\u0001\u0000\u0000\u0000"+ - "\u0394\u0397\u0003F\u001d\u0000\u0395\u0397\u0003\u009eI\u0000\u0396\u0394"+ - "\u0001\u0000\u0000\u0000\u0396\u0395\u0001\u0000\u0000\u0000\u0397\u039b"+ - "\u0001\u0000\u0000\u0000\u0398\u039a\u0003\u00d4d\u0000\u0399\u0398\u0001"+ - "\u0000\u0000\u0000\u039a\u039d\u0001\u0000\u0000\u0000\u039b\u0399\u0001"+ - "\u0000\u0000\u0000\u039b\u039c\u0001\u0000\u0000\u0000\u039c\u03a8\u0001"+ - "\u0000\u0000\u0000\u039d\u039b\u0001\u0000\u0000\u0000\u039e\u03a1\u0003"+ - 
"T$\u0000\u039f\u03a1\u0003N!\u0000\u03a0\u039e\u0001\u0000\u0000\u0000"+ - "\u03a0\u039f\u0001\u0000\u0000\u0000\u03a1\u03a3\u0001\u0000\u0000\u0000"+ - "\u03a2\u03a4\u0003\u00d4d\u0000\u03a3\u03a2\u0001\u0000\u0000\u0000\u03a4"+ - "\u03a5\u0001\u0000\u0000\u0000\u03a5\u03a3\u0001\u0000\u0000\u0000\u03a5"+ - "\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a8\u0001\u0000\u0000\u0000\u03a7"+ - "\u0396\u0001\u0000\u0000\u0000\u03a7\u03a0\u0001\u0000\u0000\u0000\u03a8"+ - "\u00d7\u0001\u0000\u0000\u0000\u03a9\u03ac\u0003\u00d6e\u0000\u03aa\u03ac"+ - "\u0003\u00aaO\u0000\u03ab\u03a9\u0001\u0000\u0000\u0000\u03ab\u03aa\u0001"+ - "\u0000\u0000\u0000\u03ac\u03ad\u0001\u0000\u0000\u0000\u03ad\u03ab\u0001"+ - "\u0000\u0000\u0000\u03ad\u03ae\u0001\u0000\u0000\u0000\u03ae\u00d9\u0001"+ - "\u0000\u0000\u0000\u03af\u03b0\u00032\u0013\u0000\u03b0\u03b1\u0001\u0000"+ - "\u0000\u0000\u03b1\u03b2\u0006g\t\u0000\u03b2\u00db\u0001\u0000\u0000"+ - "\u0000\u03b3\u03b4\u00034\u0014\u0000\u03b4\u03b5\u0001\u0000\u0000\u0000"+ - "\u03b5\u03b6\u0006h\t\u0000\u03b6\u00dd\u0001\u0000\u0000\u0000\u03b7"+ - "\u03b8\u00036\u0015\u0000\u03b8\u03b9\u0001\u0000\u0000\u0000\u03b9\u03ba"+ - "\u0006i\t\u0000\u03ba\u00df\u0001\u0000\u0000\u0000\u03bb\u03bc\u0003"+ - "B\u001b\u0000\u03bc\u03bd\u0001\u0000\u0000\u0000\u03bd\u03be\u0006j\f"+ - "\u0000\u03be\u03bf\u0006j\r\u0000\u03bf\u00e1\u0001\u0000\u0000\u0000"+ - "\u03c0\u03c1\u0003d,\u0000\u03c1\u03c2\u0001\u0000\u0000\u0000\u03c2\u03c3"+ - "\u0006k\u0010\u0000\u03c3\u00e3\u0001\u0000\u0000\u0000\u03c4\u03c5\u0003"+ - "h.\u0000\u03c5\u03c6\u0001\u0000\u0000\u0000\u03c6\u03c7\u0006l\u000f"+ - "\u0000\u03c7\u00e5\u0001\u0000\u0000\u0000\u03c8\u03c9\u0003l0\u0000\u03c9"+ - "\u03ca\u0001\u0000\u0000\u0000\u03ca\u03cb\u0006m\u0012\u0000\u03cb\u00e7"+ - "\u0001\u0000\u0000\u0000\u03cc\u03cd\u0005a\u0000\u0000\u03cd\u03ce\u0005"+ - "s\u0000\u0000\u03ce\u00e9\u0001\u0000\u0000\u0000\u03cf\u03d0\u0003\u00d8"+ - 
"f\u0000\u03d0\u03d1\u0001\u0000\u0000\u0000\u03d1\u03d2\u0006o\u0013\u0000"+ - "\u03d2\u00eb\u0001\u0000\u0000\u0000\u03d3\u03d4\u00032\u0013\u0000\u03d4"+ - "\u03d5\u0001\u0000\u0000\u0000\u03d5\u03d6\u0006p\t\u0000\u03d6\u00ed"+ - "\u0001\u0000\u0000\u0000\u03d7\u03d8\u00034\u0014\u0000\u03d8\u03d9\u0001"+ - "\u0000\u0000\u0000\u03d9\u03da\u0006q\t\u0000\u03da\u00ef\u0001\u0000"+ - "\u0000\u0000\u03db\u03dc\u00036\u0015\u0000\u03dc\u03dd\u0001\u0000\u0000"+ - "\u0000\u03dd\u03de\u0006r\t\u0000\u03de\u00f1\u0001\u0000\u0000\u0000"+ - "\u03df\u03e0\u0003B\u001b\u0000\u03e0\u03e1\u0001\u0000\u0000\u0000\u03e1"+ - "\u03e2\u0006s\f\u0000\u03e2\u03e3\u0006s\r\u0000\u03e3\u00f3\u0001\u0000"+ - "\u0000\u0000\u03e4\u03e5\u0003\u00a4L\u0000\u03e5\u03e6\u0001\u0000\u0000"+ - "\u0000\u03e6\u03e7\u0006t\n\u0000\u03e7\u03e8\u0006t\u0014\u0000\u03e8"+ - "\u00f5\u0001\u0000\u0000\u0000\u03e9\u03ea\u0005o\u0000\u0000\u03ea\u03eb"+ - "\u0005n\u0000\u0000\u03eb\u03ec\u0001\u0000\u0000\u0000\u03ec\u03ed\u0006"+ - "u\u0015\u0000\u03ed\u00f7\u0001\u0000\u0000\u0000\u03ee\u03ef\u0005w\u0000"+ - "\u0000\u03ef\u03f0\u0005i\u0000\u0000\u03f0\u03f1\u0005t\u0000\u0000\u03f1"+ - "\u03f2\u0005h\u0000\u0000\u03f2\u03f3\u0001\u0000\u0000\u0000\u03f3\u03f4"+ - "\u0006v\u0015\u0000\u03f4\u00f9\u0001\u0000\u0000\u0000\u03f5\u03f6\b"+ - "\f\u0000\u0000\u03f6\u00fb\u0001\u0000\u0000\u0000\u03f7\u03f9\u0003\u00fa"+ - "w\u0000\u03f8\u03f7\u0001\u0000\u0000\u0000\u03f9\u03fa\u0001\u0000\u0000"+ - "\u0000\u03fa\u03f8\u0001\u0000\u0000\u0000\u03fa\u03fb\u0001\u0000\u0000"+ - "\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc\u03fd\u0003\u0140\u009a"+ - "\u0000\u03fd\u03ff\u0001\u0000\u0000\u0000\u03fe\u03f8\u0001\u0000\u0000"+ - "\u0000\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff\u0401\u0001\u0000\u0000"+ - "\u0000\u0400\u0402\u0003\u00faw\u0000\u0401\u0400\u0001\u0000\u0000\u0000"+ - "\u0402\u0403\u0001\u0000\u0000\u0000\u0403\u0401\u0001\u0000\u0000\u0000"+ - 
"\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u00fd\u0001\u0000\u0000\u0000"+ - "\u0405\u0406\u0003\u00acP\u0000\u0406\u0407\u0001\u0000\u0000\u0000\u0407"+ - "\u0408\u0006y\u0016\u0000\u0408\u00ff\u0001\u0000\u0000\u0000\u0409\u040a"+ - "\u0003\u00fcx\u0000\u040a\u040b\u0001\u0000\u0000\u0000\u040b\u040c\u0006"+ - "z\u0017\u0000\u040c\u0101\u0001\u0000\u0000\u0000\u040d\u040e\u00032\u0013"+ - "\u0000\u040e\u040f\u0001\u0000\u0000\u0000\u040f\u0410\u0006{\t\u0000"+ - "\u0410\u0103\u0001\u0000\u0000\u0000\u0411\u0412\u00034\u0014\u0000\u0412"+ - "\u0413\u0001\u0000\u0000\u0000\u0413\u0414\u0006|\t\u0000\u0414\u0105"+ - "\u0001\u0000\u0000\u0000\u0415\u0416\u00036\u0015\u0000\u0416\u0417\u0001"+ - "\u0000\u0000\u0000\u0417\u0418\u0006}\t\u0000\u0418\u0107\u0001\u0000"+ - "\u0000\u0000\u0419\u041a\u0003B\u001b\u0000\u041a\u041b\u0001\u0000\u0000"+ - "\u0000\u041b\u041c\u0006~\f\u0000\u041c\u041d\u0006~\r\u0000\u041d\u041e"+ - "\u0006~\r\u0000\u041e\u0109\u0001\u0000\u0000\u0000\u041f\u0420\u0003"+ - "d,\u0000\u0420\u0421\u0001\u0000\u0000\u0000\u0421\u0422\u0006\u007f\u0010"+ - "\u0000\u0422\u010b\u0001\u0000\u0000\u0000\u0423\u0424\u0003h.\u0000\u0424"+ - "\u0425\u0001\u0000\u0000\u0000\u0425\u0426\u0006\u0080\u000f\u0000\u0426"+ - "\u010d\u0001\u0000\u0000\u0000\u0427\u0428\u0003l0\u0000\u0428\u0429\u0001"+ - "\u0000\u0000\u0000\u0429\u042a\u0006\u0081\u0012\u0000\u042a\u010f\u0001"+ - "\u0000\u0000\u0000\u042b\u042c\u0003\u00f8v\u0000\u042c\u042d\u0001\u0000"+ - "\u0000\u0000\u042d\u042e\u0006\u0082\u0018\u0000\u042e\u0111\u0001\u0000"+ - "\u0000\u0000\u042f\u0430\u0003\u00d8f\u0000\u0430\u0431\u0001\u0000\u0000"+ - "\u0000\u0431\u0432\u0006\u0083\u0013\u0000\u0432\u0113\u0001\u0000\u0000"+ - "\u0000\u0433\u0434\u0003\u00acP\u0000\u0434\u0435\u0001\u0000\u0000\u0000"+ - "\u0435\u0436\u0006\u0084\u0016\u0000\u0436\u0115\u0001\u0000\u0000\u0000"+ - "\u0437\u0438\u00032\u0013\u0000\u0438\u0439\u0001\u0000\u0000\u0000\u0439"+ - 
"\u043a\u0006\u0085\t\u0000\u043a\u0117\u0001\u0000\u0000\u0000\u043b\u043c"+ - "\u00034\u0014\u0000\u043c\u043d\u0001\u0000\u0000\u0000\u043d\u043e\u0006"+ - "\u0086\t\u0000\u043e\u0119\u0001\u0000\u0000\u0000\u043f\u0440\u00036"+ - "\u0015\u0000\u0440\u0441\u0001\u0000\u0000\u0000\u0441\u0442\u0006\u0087"+ - "\t\u0000\u0442\u011b\u0001\u0000\u0000\u0000\u0443\u0444\u0003B\u001b"+ - "\u0000\u0444\u0445\u0001\u0000\u0000\u0000\u0445\u0446\u0006\u0088\f\u0000"+ - "\u0446\u0447\u0006\u0088\r\u0000\u0447\u011d\u0001\u0000\u0000\u0000\u0448"+ - "\u0449\u0003l0\u0000\u0449\u044a\u0001\u0000\u0000\u0000\u044a\u044b\u0006"+ - "\u0089\u0012\u0000\u044b\u011f\u0001\u0000\u0000\u0000\u044c\u044d\u0003"+ - "\u00acP\u0000\u044d\u044e\u0001\u0000\u0000\u0000\u044e\u044f\u0006\u008a"+ - "\u0016\u0000\u044f\u0121\u0001\u0000\u0000\u0000\u0450\u0451\u0003\u00a8"+ - "N\u0000\u0451\u0452\u0001\u0000\u0000\u0000\u0452\u0453\u0006\u008b\u0019"+ - "\u0000\u0453\u0123\u0001\u0000\u0000\u0000\u0454\u0455\u00032\u0013\u0000"+ - "\u0455\u0456\u0001\u0000\u0000\u0000\u0456\u0457\u0006\u008c\t\u0000\u0457"+ - "\u0125\u0001\u0000\u0000\u0000\u0458\u0459\u00034\u0014\u0000\u0459\u045a"+ - "\u0001\u0000\u0000\u0000\u045a\u045b\u0006\u008d\t\u0000\u045b\u0127\u0001"+ - "\u0000\u0000\u0000\u045c\u045d\u00036\u0015\u0000\u045d\u045e\u0001\u0000"+ - "\u0000\u0000\u045e\u045f\u0006\u008e\t\u0000\u045f\u0129\u0001\u0000\u0000"+ - "\u0000\u0460\u0461\u0003B\u001b\u0000\u0461\u0462\u0001\u0000\u0000\u0000"+ - "\u0462\u0463\u0006\u008f\f\u0000\u0463\u0464\u0006\u008f\r\u0000\u0464"+ - "\u012b\u0001\u0000\u0000\u0000\u0465\u0466\u0005i\u0000\u0000\u0466\u0467"+ - "\u0005n\u0000\u0000\u0467\u0468\u0005f\u0000\u0000\u0468\u0469\u0005o"+ - "\u0000\u0000\u0469\u012d\u0001\u0000\u0000\u0000\u046a\u046b\u00032\u0013"+ - "\u0000\u046b\u046c\u0001\u0000\u0000\u0000\u046c\u046d\u0006\u0091\t\u0000"+ - "\u046d\u012f\u0001\u0000\u0000\u0000\u046e\u046f\u00034\u0014\u0000\u046f"+ - 
"\u0470\u0001\u0000\u0000\u0000\u0470\u0471\u0006\u0092\t\u0000\u0471\u0131"+ - "\u0001\u0000\u0000\u0000\u0472\u0473\u00036\u0015\u0000\u0473\u0474\u0001"+ - "\u0000\u0000\u0000\u0474\u0475\u0006\u0093\t\u0000\u0475\u0133\u0001\u0000"+ - "\u0000\u0000\u0476\u0477\u0003B\u001b\u0000\u0477\u0478\u0001\u0000\u0000"+ - "\u0000\u0478\u0479\u0006\u0094\f\u0000\u0479\u047a\u0006\u0094\r\u0000"+ - "\u047a\u0135\u0001\u0000\u0000\u0000\u047b\u047c\u0005f\u0000\u0000\u047c"+ - "\u047d\u0005u\u0000\u0000\u047d\u047e\u0005n\u0000\u0000\u047e\u047f\u0005"+ - "c\u0000\u0000\u047f\u0480\u0005t\u0000\u0000\u0480\u0481\u0005i\u0000"+ - "\u0000\u0481\u0482\u0005o\u0000\u0000\u0482\u0483\u0005n\u0000\u0000\u0483"+ - "\u0484\u0005s\u0000\u0000\u0484\u0137\u0001\u0000\u0000\u0000\u0485\u0486"+ - "\u00032\u0013\u0000\u0486\u0487\u0001\u0000\u0000\u0000\u0487\u0488\u0006"+ - "\u0096\t\u0000\u0488\u0139\u0001\u0000\u0000\u0000\u0489\u048a\u00034"+ - "\u0014\u0000\u048a\u048b\u0001\u0000\u0000\u0000\u048b\u048c\u0006\u0097"+ - "\t\u0000\u048c\u013b\u0001\u0000\u0000\u0000\u048d\u048e\u00036\u0015"+ - "\u0000\u048e\u048f\u0001\u0000\u0000\u0000\u048f\u0490\u0006\u0098\t\u0000"+ - "\u0490\u013d\u0001\u0000\u0000\u0000\u0491\u0492\u0003\u00a6M\u0000\u0492"+ - "\u0493\u0001\u0000\u0000\u0000\u0493\u0494\u0006\u0099\u000e\u0000\u0494"+ - "\u0495\u0006\u0099\r\u0000\u0495\u013f\u0001\u0000\u0000\u0000\u0496\u0497"+ - "\u0005:\u0000\u0000\u0497\u0141\u0001\u0000\u0000\u0000\u0498\u049e\u0003"+ - "N!\u0000\u0499\u049e\u0003D\u001c\u0000\u049a\u049e\u0003l0\u0000\u049b"+ - "\u049e\u0003F\u001d\u0000\u049c\u049e\u0003T$\u0000\u049d\u0498\u0001"+ - "\u0000\u0000\u0000\u049d\u0499\u0001\u0000\u0000\u0000\u049d\u049a\u0001"+ - "\u0000\u0000\u0000\u049d\u049b\u0001\u0000\u0000\u0000\u049d\u049c\u0001"+ - "\u0000\u0000\u0000\u049e\u049f\u0001\u0000\u0000\u0000\u049f\u049d\u0001"+ - "\u0000\u0000\u0000\u049f\u04a0\u0001\u0000\u0000\u0000\u04a0\u0143\u0001"+ - 
"\u0000\u0000\u0000\u04a1\u04a2\u00032\u0013\u0000\u04a2\u04a3\u0001\u0000"+ - "\u0000\u0000\u04a3\u04a4\u0006\u009c\t\u0000\u04a4\u0145\u0001\u0000\u0000"+ - "\u0000\u04a5\u04a6\u00034\u0014\u0000\u04a6\u04a7\u0001\u0000\u0000\u0000"+ - "\u04a7\u04a8\u0006\u009d\t\u0000\u04a8\u0147\u0001\u0000\u0000\u0000\u04a9"+ - "\u04aa\u00036\u0015\u0000\u04aa\u04ab\u0001\u0000\u0000\u0000\u04ab\u04ac"+ - "\u0006\u009e\t\u0000\u04ac\u0149\u0001\u0000\u0000\u0000:\u0000\u0001"+ - "\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\u01e3\u01ed\u01f1\u01f4"+ - "\u01fd\u01ff\u020a\u0233\u0238\u0241\u0248\u024d\u024f\u025a\u0262\u0265"+ - "\u0267\u026c\u0271\u0277\u027e\u0283\u0289\u028c\u0294\u0298\u031d\u0322"+ - "\u0327\u0329\u032f\u036e\u0373\u0392\u0396\u039b\u03a0\u03a5\u03a7\u03ab"+ - "\u03ad\u03fa\u03fe\u0403\u049d\u049f\u001a\u0005\u0002\u0000\u0005\u0004"+ - "\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000\u0005\n\u0000"+ - "\u0005\b\u0000\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001\u0000\u0007"+ - "A\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000\u0007B\u0000"+ - "\u0007#\u0000\u0007!\u0000\u0007\u001b\u0000\u0007%\u0000\u0007N\u0000"+ - "\u0005\u000b\u0000\u0005\u0007\u0000\u0007D\u0000\u0007X\u0000\u0007W"+ - "\u0000\u0007C\u0000"; + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001"+ + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ + "\u0013\u0004\u0013\u020c\b\u0013\u000b\u0013\f\u0013\u020d\u0001\u0013"+ + "\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014"+ + "\u0216\b\u0014\n\u0014\f\u0014\u0219\t\u0014\u0001\u0014\u0003\u0014\u021c"+ + "\b\u0014\u0001\u0014\u0003\u0014\u021f\b\u0014\u0001\u0014\u0001\u0014"+ + 
"\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0005\u0015"+ + "\u0228\b\u0015\n\u0015\f\u0015\u022b\t\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0004\u0016\u0233\b\u0016\u000b"+ + "\u0016\f\u0016\u0234\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0003\u0017\u023c\b\u0017\u0001\u0018\u0004\u0018\u023f\b\u0018"+ + "\u000b\u0018\f\u0018\u0240\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ + "\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ + "\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f"+ + "\u0001 \u0001 \u0001!\u0001!\u0001!\u0001\"\u0001\"\u0001#\u0001#\u0003"+ + "#\u0268\b#\u0001#\u0004#\u026b\b#\u000b#\f#\u026c\u0001$\u0001$\u0001"+ + "%\u0001%\u0001&\u0001&\u0001&\u0003&\u0276\b&\u0001\'\u0001\'\u0001(\u0001"+ + "(\u0001(\u0003(\u027d\b(\u0001)\u0001)\u0001)\u0005)\u0282\b)\n)\f)\u0285"+ + "\t)\u0001)\u0001)\u0001)\u0001)\u0001)\u0001)\u0005)\u028d\b)\n)\f)\u0290"+ + "\t)\u0001)\u0001)\u0001)\u0001)\u0001)\u0003)\u0297\b)\u0001)\u0003)\u029a"+ + "\b)\u0003)\u029c\b)\u0001*\u0004*\u029f\b*\u000b*\f*\u02a0\u0001+\u0004"+ + "+\u02a4\b+\u000b+\f+\u02a5\u0001+\u0001+\u0005+\u02aa\b+\n+\f+\u02ad\t"+ + "+\u0001+\u0001+\u0004+\u02b1\b+\u000b+\f+\u02b2\u0001+\u0004+\u02b6\b"+ + "+\u000b+\f+\u02b7\u0001+\u0001+\u0005+\u02bc\b+\n+\f+\u02bf\t+\u0003+"+ + "\u02c1\b+\u0001+\u0001+\u0001+\u0001+\u0004+\u02c7\b+\u000b+\f+\u02c8"+ + "\u0001+\u0001+\u0003+\u02cd\b+\u0001,\u0001,\u0001,\u0001-\u0001-\u0001"+ + "-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u0001"+ + "0\u00011\u00011\u00012\u00012\u00012\u00012\u00012\u00013\u00013\u0001"+ + "4\u00014\u00014\u00014\u00014\u00014\u00015\u00015\u00015\u00015\u0001"+ + 
"5\u00015\u00016\u00016\u00016\u00016\u00016\u00017\u00017\u00018\u0001"+ + "8\u00018\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001:\u0001"+ + ";\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001"+ + "=\u0001=\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001?\u0001?\u0001"+ + "@\u0001@\u0001@\u0001@\u0001@\u0001@\u0001A\u0001A\u0001B\u0001B\u0001"+ + "B\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001"+ + "E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001G\u0001H\u0001H\u0001I\u0001"+ + "I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001"+ + "N\u0001N\u0001O\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001"+ + "P\u0001P\u0001Q\u0001Q\u0005Q\u0350\bQ\nQ\fQ\u0353\tQ\u0001Q\u0001Q\u0003"+ + "Q\u0357\bQ\u0001Q\u0004Q\u035a\bQ\u000bQ\fQ\u035b\u0003Q\u035e\bQ\u0001"+ + "R\u0001R\u0004R\u0362\bR\u000bR\fR\u0363\u0001R\u0001R\u0001S\u0001S\u0001"+ + "T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001"+ + "V\u0001V\u0001W\u0001W\u0001W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001"+ + "X\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001"+ + "[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001"+ + "]\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001^\u0001"+ + "^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001"+ + "`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001"+ + "c\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001"+ + "e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001f\u0003f\u03c1\bf\u0001g\u0001"+ + "g\u0003g\u03c5\bg\u0001g\u0005g\u03c8\bg\ng\fg\u03cb\tg\u0001g\u0001g"+ + "\u0003g\u03cf\bg\u0001g\u0004g\u03d2\bg\u000bg\fg\u03d3\u0003g\u03d6\b"+ + "g\u0001h\u0001h\u0004h\u03da\bh\u000bh\fh\u03db\u0001i\u0001i\u0001i\u0001"+ + "i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001l\u0001"+ + "l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001"+ + 
"n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001q\u0001"+ + "q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001"+ + "s\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001"+ + "v\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001w\u0001"+ + "x\u0001x\u0001x\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001z\u0004"+ + "z\u0427\bz\u000bz\fz\u0428\u0001z\u0001z\u0003z\u042d\bz\u0001z\u0004"+ + "z\u0430\bz\u000bz\fz\u0431\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001"+ + "|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001"+ + "\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001"+ + "\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001"+ + "\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001"+ + "\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001"+ + "\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001"+ + "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001"+ + "\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ + "\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001"+ + "\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ + "\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001"+ + "\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ + "\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001"+ + "\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ + "\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ + "\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001"+ + "\u0094\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ + "\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001"+ + 
"\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001"+ + "\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001"+ + "\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001"+ + "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001"+ + "\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009d\u0001"+ + "\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0004\u009d\u04cc\b\u009d\u000b"+ + "\u009d\f\u009d\u04cd\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001"+ + "\u009f\u0001\u009f\u0001\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001"+ + "\u00a0\u0001\u00a0\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001"+ + "\u00a1\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001"+ + "\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001\u00a4\u0001"+ + "\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001"+ + "\u00a5\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001\u00a6\u0001"+ + "\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a8\u0001"+ + "\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0001"+ + "\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ + "\u00aa\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001"+ + "\u00ab\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ac\u0001"+ + "\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0002"+ + "\u0229\u028e\u0000\u00ae\u000e\u0001\u0010\u0002\u0012\u0003\u0014\u0004"+ + "\u0016\u0005\u0018\u0006\u001a\u0007\u001c\b\u001e\t \n\"\u000b$\f&\r"+ + "(\u000e*\u000f,\u0010.\u00110\u00122\u00134\u00146\u00158\u0016:\u0017"+ + "<\u0000>\u0018@\u0000B\u0000D\u0019F\u001aH\u001bJ\u001cL\u0000N\u0000"+ + "P\u0000R\u0000T\u0000V\u0000X\u0000Z\u0000\\\u0000^\u0000`\u001db\u001e"+ + "d\u001ff h!j\"l#n$p%r&t\'v(x)z*|+~,\u0080-\u0082.\u0084/\u00860\u0088"+ + 
"1\u008a2\u008c3\u008e4\u00905\u00926\u00947\u00968\u00989\u009a:\u009c"+ + ";\u009e<\u00a0=\u00a2>\u00a4?\u00a6@\u00a8A\u00aaB\u00acC\u00aeD\u00b0"+ + "E\u00b2\u0000\u00b4F\u00b6G\u00b8H\u00baI\u00bc\u0000\u00be\u0000\u00c0"+ + "\u0000\u00c2\u0000\u00c4\u0000\u00c6\u0000\u00c8J\u00caK\u00cc\u0000\u00ce"+ + "L\u00d0M\u00d2N\u00d4\u0000\u00d6\u0000\u00d8\u0000\u00da\u0000\u00dc"+ + "\u0000\u00deO\u00e0P\u00e2Q\u00e4R\u00e6\u0000\u00e8\u0000\u00ea\u0000"+ + "\u00ec\u0000\u00eeS\u00f0\u0000\u00f2T\u00f4U\u00f6V\u00f8\u0000\u00fa"+ + "\u0000\u00fcW\u00feX\u0100\u0000\u0102Y\u0104\u0000\u0106\u0000\u0108"+ + "Z\u010a[\u010c\\\u010e\u0000\u0110\u0000\u0112\u0000\u0114\u0000\u0116"+ + "\u0000\u0118\u0000\u011a\u0000\u011c]\u011e^\u0120_\u0122\u0000\u0124"+ + "\u0000\u0126\u0000\u0128\u0000\u012a`\u012ca\u012eb\u0130\u0000\u0132"+ + "c\u0134d\u0136e\u0138f\u013a\u0000\u013cg\u013eh\u0140i\u0142j\u0144\u0000"+ + "\u0146k\u0148l\u014am\u014cn\u014eo\u0150\u0000\u0152\u0000\u0154p\u0156"+ + "q\u0158r\u015a\u0000\u015cs\u015et\u0160u\u0162\u0000\u0164\u0000\u0166"+ + "\u0000\u0168\u0000\u000e\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007"+ + "\b\t\n\u000b\f\r\r\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003"+ + "\u0000\t\n\r\r \n\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u0001"+ + "\u000009\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r"+ + "\"\"\\\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\u000b\u0000\t\n"+ + "\r\r \"#,,//::<<>?\\\\||\u0534\u0000\u000e\u0001\u0000\u0000\u0000\u0000"+ + "\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000\u0000"+ + "\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000\u0000"+ + "\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000\u0000"+ + "\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000\u0000"+ + " \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000$\u0001"+ + "\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001\u0000\u0000"+ + 
"\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000\u0000\u0000"+ + ".\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u00002\u0001"+ + "\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001\u0000\u0000"+ + "\u0000\u00008\u0001\u0000\u0000\u0000\u0000:\u0001\u0000\u0000\u0000\u0000"+ + ">\u0001\u0000\u0000\u0000\u0001@\u0001\u0000\u0000\u0000\u0001B\u0001"+ + "\u0000\u0000\u0000\u0001D\u0001\u0000\u0000\u0000\u0001F\u0001\u0000\u0000"+ + "\u0000\u0001H\u0001\u0000\u0000\u0000\u0002J\u0001\u0000\u0000\u0000\u0002"+ + "`\u0001\u0000\u0000\u0000\u0002b\u0001\u0000\u0000\u0000\u0002d\u0001"+ + "\u0000\u0000\u0000\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000\u0000"+ + "\u0000\u0002j\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002"+ + "n\u0001\u0000\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001"+ + "\u0000\u0000\u0000\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000\u0000"+ + "\u0000\u0002x\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000\u0002"+ + "|\u0001\u0000\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080\u0001"+ + "\u0000\u0000\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084\u0001"+ + "\u0000\u0000\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088\u0001"+ + "\u0000\u0000\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c\u0001"+ + "\u0000\u0000\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090\u0001"+ + "\u0000\u0000\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094\u0001"+ + "\u0000\u0000\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098\u0001"+ + "\u0000\u0000\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c\u0001"+ + "\u0000\u0000\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0\u0001"+ + "\u0000\u0000\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4\u0001"+ + "\u0000\u0000\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00a8\u0001"+ + "\u0000\u0000\u0000\u0002\u00aa\u0001\u0000\u0000\u0000\u0002\u00ac\u0001"+ + 
"\u0000\u0000\u0000\u0002\u00ae\u0001\u0000\u0000\u0000\u0002\u00b0\u0001"+ + "\u0000\u0000\u0000\u0002\u00b4\u0001\u0000\u0000\u0000\u0002\u00b6\u0001"+ + "\u0000\u0000\u0000\u0002\u00b8\u0001\u0000\u0000\u0000\u0002\u00ba\u0001"+ + "\u0000\u0000\u0000\u0003\u00bc\u0001\u0000\u0000\u0000\u0003\u00be\u0001"+ + "\u0000\u0000\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001"+ + "\u0000\u0000\u0000\u0003\u00c4\u0001\u0000\u0000\u0000\u0003\u00c6\u0001"+ + "\u0000\u0000\u0000\u0003\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca\u0001"+ + "\u0000\u0000\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001"+ + "\u0000\u0000\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003\u00d2\u0001"+ + "\u0000\u0000\u0000\u0004\u00d4\u0001\u0000\u0000\u0000\u0004\u00d6\u0001"+ + "\u0000\u0000\u0000\u0004\u00d8\u0001\u0000\u0000\u0000\u0004\u00de\u0001"+ + "\u0000\u0000\u0000\u0004\u00e0\u0001\u0000\u0000\u0000\u0004\u00e2\u0001"+ + "\u0000\u0000\u0000\u0004\u00e4\u0001\u0000\u0000\u0000\u0005\u00e6\u0001"+ + "\u0000\u0000\u0000\u0005\u00e8\u0001\u0000\u0000\u0000\u0005\u00ea\u0001"+ + "\u0000\u0000\u0000\u0005\u00ec\u0001\u0000\u0000\u0000\u0005\u00ee\u0001"+ + "\u0000\u0000\u0000\u0005\u00f0\u0001\u0000\u0000\u0000\u0005\u00f2\u0001"+ + "\u0000\u0000\u0000\u0005\u00f4\u0001\u0000\u0000\u0000\u0005\u00f6\u0001"+ + "\u0000\u0000\u0000\u0006\u00f8\u0001\u0000\u0000\u0000\u0006\u00fa\u0001"+ + "\u0000\u0000\u0000\u0006\u00fc\u0001\u0000\u0000\u0000\u0006\u00fe\u0001"+ + "\u0000\u0000\u0000\u0006\u0102\u0001\u0000\u0000\u0000\u0006\u0104\u0001"+ + "\u0000\u0000\u0000\u0006\u0106\u0001\u0000\u0000\u0000\u0006\u0108\u0001"+ + "\u0000\u0000\u0000\u0006\u010a\u0001\u0000\u0000\u0000\u0006\u010c\u0001"+ + "\u0000\u0000\u0000\u0007\u010e\u0001\u0000\u0000\u0000\u0007\u0110\u0001"+ + "\u0000\u0000\u0000\u0007\u0112\u0001\u0000\u0000\u0000\u0007\u0114\u0001"+ + "\u0000\u0000\u0000\u0007\u0116\u0001\u0000\u0000\u0000\u0007\u0118\u0001"+ + 
"\u0000\u0000\u0000\u0007\u011a\u0001\u0000\u0000\u0000\u0007\u011c\u0001"+ + "\u0000\u0000\u0000\u0007\u011e\u0001\u0000\u0000\u0000\u0007\u0120\u0001"+ + "\u0000\u0000\u0000\b\u0122\u0001\u0000\u0000\u0000\b\u0124\u0001\u0000"+ + "\u0000\u0000\b\u0126\u0001\u0000\u0000\u0000\b\u0128\u0001\u0000\u0000"+ + "\u0000\b\u012a\u0001\u0000\u0000\u0000\b\u012c\u0001\u0000\u0000\u0000"+ + "\b\u012e\u0001\u0000\u0000\u0000\t\u0130\u0001\u0000\u0000\u0000\t\u0132"+ + "\u0001\u0000\u0000\u0000\t\u0134\u0001\u0000\u0000\u0000\t\u0136\u0001"+ + "\u0000\u0000\u0000\t\u0138\u0001\u0000\u0000\u0000\n\u013a\u0001\u0000"+ + "\u0000\u0000\n\u013c\u0001\u0000\u0000\u0000\n\u013e\u0001\u0000\u0000"+ + "\u0000\n\u0140\u0001\u0000\u0000\u0000\n\u0142\u0001\u0000\u0000\u0000"+ + "\u000b\u0144\u0001\u0000\u0000\u0000\u000b\u0146\u0001\u0000\u0000\u0000"+ + "\u000b\u0148\u0001\u0000\u0000\u0000\u000b\u014a\u0001\u0000\u0000\u0000"+ + "\u000b\u014c\u0001\u0000\u0000\u0000\u000b\u014e\u0001\u0000\u0000\u0000"+ + "\f\u0150\u0001\u0000\u0000\u0000\f\u0152\u0001\u0000\u0000\u0000\f\u0154"+ + "\u0001\u0000\u0000\u0000\f\u0156\u0001\u0000\u0000\u0000\f\u0158\u0001"+ + "\u0000\u0000\u0000\r\u015a\u0001\u0000\u0000\u0000\r\u015c\u0001\u0000"+ + "\u0000\u0000\r\u015e\u0001\u0000\u0000\u0000\r\u0160\u0001\u0000\u0000"+ + "\u0000\r\u0162\u0001\u0000\u0000\u0000\r\u0164\u0001\u0000\u0000\u0000"+ + "\r\u0166\u0001\u0000\u0000\u0000\r\u0168\u0001\u0000\u0000\u0000\u000e"+ + "\u016a\u0001\u0000\u0000\u0000\u0010\u0174\u0001\u0000\u0000\u0000\u0012"+ + "\u017b\u0001\u0000\u0000\u0000\u0014\u0184\u0001\u0000\u0000\u0000\u0016"+ + "\u018b\u0001\u0000\u0000\u0000\u0018\u0195\u0001\u0000\u0000\u0000\u001a"+ + "\u019c\u0001\u0000\u0000\u0000\u001c\u01a3\u0001\u0000\u0000\u0000\u001e"+ + "\u01b1\u0001\u0000\u0000\u0000 \u01b8\u0001\u0000\u0000\u0000\"\u01c0"+ + "\u0001\u0000\u0000\u0000$\u01c7\u0001\u0000\u0000\u0000&\u01d1\u0001\u0000"+ + 
"\u0000\u0000(\u01dd\u0001\u0000\u0000\u0000*\u01e6\u0001\u0000\u0000\u0000"+ + ",\u01ec\u0001\u0000\u0000\u0000.\u01f3\u0001\u0000\u0000\u00000\u01fa"+ + "\u0001\u0000\u0000\u00002\u0202\u0001\u0000\u0000\u00004\u020b\u0001\u0000"+ + "\u0000\u00006\u0211\u0001\u0000\u0000\u00008\u0222\u0001\u0000\u0000\u0000"+ + ":\u0232\u0001\u0000\u0000\u0000<\u023b\u0001\u0000\u0000\u0000>\u023e"+ + "\u0001\u0000\u0000\u0000@\u0242\u0001\u0000\u0000\u0000B\u0247\u0001\u0000"+ + "\u0000\u0000D\u024c\u0001\u0000\u0000\u0000F\u0250\u0001\u0000\u0000\u0000"+ + "H\u0254\u0001\u0000\u0000\u0000J\u0258\u0001\u0000\u0000\u0000L\u025c"+ + "\u0001\u0000\u0000\u0000N\u025e\u0001\u0000\u0000\u0000P\u0260\u0001\u0000"+ + "\u0000\u0000R\u0263\u0001\u0000\u0000\u0000T\u0265\u0001\u0000\u0000\u0000"+ + "V\u026e\u0001\u0000\u0000\u0000X\u0270\u0001\u0000\u0000\u0000Z\u0275"+ + "\u0001\u0000\u0000\u0000\\\u0277\u0001\u0000\u0000\u0000^\u027c\u0001"+ + "\u0000\u0000\u0000`\u029b\u0001\u0000\u0000\u0000b\u029e\u0001\u0000\u0000"+ + "\u0000d\u02cc\u0001\u0000\u0000\u0000f\u02ce\u0001\u0000\u0000\u0000h"+ + "\u02d1\u0001\u0000\u0000\u0000j\u02d5\u0001\u0000\u0000\u0000l\u02d9\u0001"+ + "\u0000\u0000\u0000n\u02db\u0001\u0000\u0000\u0000p\u02de\u0001\u0000\u0000"+ + "\u0000r\u02e0\u0001\u0000\u0000\u0000t\u02e5\u0001\u0000\u0000\u0000v"+ + "\u02e7\u0001\u0000\u0000\u0000x\u02ed\u0001\u0000\u0000\u0000z\u02f3\u0001"+ + "\u0000\u0000\u0000|\u02f8\u0001\u0000\u0000\u0000~\u02fa\u0001\u0000\u0000"+ + "\u0000\u0080\u02fd\u0001\u0000\u0000\u0000\u0082\u0300\u0001\u0000\u0000"+ + "\u0000\u0084\u0305\u0001\u0000\u0000\u0000\u0086\u0309\u0001\u0000\u0000"+ + "\u0000\u0088\u030e\u0001\u0000\u0000\u0000\u008a\u0314\u0001\u0000\u0000"+ + "\u0000\u008c\u0317\u0001\u0000\u0000\u0000\u008e\u0319\u0001\u0000\u0000"+ + "\u0000\u0090\u031f\u0001\u0000\u0000\u0000\u0092\u0321\u0001\u0000\u0000"+ + "\u0000\u0094\u0326\u0001\u0000\u0000\u0000\u0096\u0329\u0001\u0000\u0000"+ + 
"\u0000\u0098\u032c\u0001\u0000\u0000\u0000\u009a\u032f\u0001\u0000\u0000"+ + "\u0000\u009c\u0331\u0001\u0000\u0000\u0000\u009e\u0334\u0001\u0000\u0000"+ + "\u0000\u00a0\u0336\u0001\u0000\u0000\u0000\u00a2\u0339\u0001\u0000\u0000"+ + "\u0000\u00a4\u033b\u0001\u0000\u0000\u0000\u00a6\u033d\u0001\u0000\u0000"+ + "\u0000\u00a8\u033f\u0001\u0000\u0000\u0000\u00aa\u0341\u0001\u0000\u0000"+ + "\u0000\u00ac\u0343\u0001\u0000\u0000\u0000\u00ae\u0348\u0001\u0000\u0000"+ + "\u0000\u00b0\u035d\u0001\u0000\u0000\u0000\u00b2\u035f\u0001\u0000\u0000"+ + "\u0000\u00b4\u0367\u0001\u0000\u0000\u0000\u00b6\u0369\u0001\u0000\u0000"+ + "\u0000\u00b8\u036d\u0001\u0000\u0000\u0000\u00ba\u0371\u0001\u0000\u0000"+ + "\u0000\u00bc\u0375\u0001\u0000\u0000\u0000\u00be\u037a\u0001\u0000\u0000"+ + "\u0000\u00c0\u037e\u0001\u0000\u0000\u0000\u00c2\u0382\u0001\u0000\u0000"+ + "\u0000\u00c4\u0386\u0001\u0000\u0000\u0000\u00c6\u038a\u0001\u0000\u0000"+ + "\u0000\u00c8\u038e\u0001\u0000\u0000\u0000\u00ca\u0396\u0001\u0000\u0000"+ + "\u0000\u00cc\u039f\u0001\u0000\u0000\u0000\u00ce\u03a3\u0001\u0000\u0000"+ + "\u0000\u00d0\u03a7\u0001\u0000\u0000\u0000\u00d2\u03ab\u0001\u0000\u0000"+ + "\u0000\u00d4\u03af\u0001\u0000\u0000\u0000\u00d6\u03b4\u0001\u0000\u0000"+ + "\u0000\u00d8\u03b8\u0001\u0000\u0000\u0000\u00da\u03c0\u0001\u0000\u0000"+ + "\u0000\u00dc\u03d5\u0001\u0000\u0000\u0000\u00de\u03d9\u0001\u0000\u0000"+ + "\u0000\u00e0\u03dd\u0001\u0000\u0000\u0000\u00e2\u03e1\u0001\u0000\u0000"+ + "\u0000\u00e4\u03e5\u0001\u0000\u0000\u0000\u00e6\u03e9\u0001\u0000\u0000"+ + "\u0000\u00e8\u03ee\u0001\u0000\u0000\u0000\u00ea\u03f2\u0001\u0000\u0000"+ + "\u0000\u00ec\u03f6\u0001\u0000\u0000\u0000\u00ee\u03fa\u0001\u0000\u0000"+ + "\u0000\u00f0\u03fd\u0001\u0000\u0000\u0000\u00f2\u0401\u0001\u0000\u0000"+ + "\u0000\u00f4\u0405\u0001\u0000\u0000\u0000\u00f6\u0409\u0001\u0000\u0000"+ + "\u0000\u00f8\u040d\u0001\u0000\u0000\u0000\u00fa\u0412\u0001\u0000\u0000"+ + 
"\u0000\u00fc\u0417\u0001\u0000\u0000\u0000\u00fe\u041c\u0001\u0000\u0000"+ + "\u0000\u0100\u0423\u0001\u0000\u0000\u0000\u0102\u042c\u0001\u0000\u0000"+ + "\u0000\u0104\u0433\u0001\u0000\u0000\u0000\u0106\u0437\u0001\u0000\u0000"+ + "\u0000\u0108\u043b\u0001\u0000\u0000\u0000\u010a\u043f\u0001\u0000\u0000"+ + "\u0000\u010c\u0443\u0001\u0000\u0000\u0000\u010e\u0447\u0001\u0000\u0000"+ + "\u0000\u0110\u044d\u0001\u0000\u0000\u0000\u0112\u0451\u0001\u0000\u0000"+ + "\u0000\u0114\u0455\u0001\u0000\u0000\u0000\u0116\u0459\u0001\u0000\u0000"+ + "\u0000\u0118\u045d\u0001\u0000\u0000\u0000\u011a\u0461\u0001\u0000\u0000"+ + "\u0000\u011c\u0465\u0001\u0000\u0000\u0000\u011e\u0469\u0001\u0000\u0000"+ + "\u0000\u0120\u046d\u0001\u0000\u0000\u0000\u0122\u0471\u0001\u0000\u0000"+ + "\u0000\u0124\u0476\u0001\u0000\u0000\u0000\u0126\u047a\u0001\u0000\u0000"+ + "\u0000\u0128\u047e\u0001\u0000\u0000\u0000\u012a\u0482\u0001\u0000\u0000"+ + "\u0000\u012c\u0486\u0001\u0000\u0000\u0000\u012e\u048a\u0001\u0000\u0000"+ + "\u0000\u0130\u048e\u0001\u0000\u0000\u0000\u0132\u0493\u0001\u0000\u0000"+ + "\u0000\u0134\u0498\u0001\u0000\u0000\u0000\u0136\u049c\u0001\u0000\u0000"+ + "\u0000\u0138\u04a0\u0001\u0000\u0000\u0000\u013a\u04a4\u0001\u0000\u0000"+ + "\u0000\u013c\u04a9\u0001\u0000\u0000\u0000\u013e\u04b3\u0001\u0000\u0000"+ + "\u0000\u0140\u04b7\u0001\u0000\u0000\u0000\u0142\u04bb\u0001\u0000\u0000"+ + "\u0000\u0144\u04bf\u0001\u0000\u0000\u0000\u0146\u04c4\u0001\u0000\u0000"+ + "\u0000\u0148\u04cb\u0001\u0000\u0000\u0000\u014a\u04cf\u0001\u0000\u0000"+ + "\u0000\u014c\u04d3\u0001\u0000\u0000\u0000\u014e\u04d7\u0001\u0000\u0000"+ + "\u0000\u0150\u04db\u0001\u0000\u0000\u0000\u0152\u04e0\u0001\u0000\u0000"+ + "\u0000\u0154\u04e6\u0001\u0000\u0000\u0000\u0156\u04ea\u0001\u0000\u0000"+ + "\u0000\u0158\u04ee\u0001\u0000\u0000\u0000\u015a\u04f2\u0001\u0000\u0000"+ + "\u0000\u015c\u04f8\u0001\u0000\u0000\u0000\u015e\u04fc\u0001\u0000\u0000"+ + 
"\u0000\u0160\u0500\u0001\u0000\u0000\u0000\u0162\u0504\u0001\u0000\u0000"+ + "\u0000\u0164\u050a\u0001\u0000\u0000\u0000\u0166\u0510\u0001\u0000\u0000"+ + "\u0000\u0168\u0516\u0001\u0000\u0000\u0000\u016a\u016b\u0005d\u0000\u0000"+ + "\u016b\u016c\u0005i\u0000\u0000\u016c\u016d\u0005s\u0000\u0000\u016d\u016e"+ + "\u0005s\u0000\u0000\u016e\u016f\u0005e\u0000\u0000\u016f\u0170\u0005c"+ + "\u0000\u0000\u0170\u0171\u0005t\u0000\u0000\u0171\u0172\u0001\u0000\u0000"+ + "\u0000\u0172\u0173\u0006\u0000\u0000\u0000\u0173\u000f\u0001\u0000\u0000"+ + "\u0000\u0174\u0175\u0005d\u0000\u0000\u0175\u0176\u0005r\u0000\u0000\u0176"+ + "\u0177\u0005o\u0000\u0000\u0177\u0178\u0005p\u0000\u0000\u0178\u0179\u0001"+ + "\u0000\u0000\u0000\u0179\u017a\u0006\u0001\u0001\u0000\u017a\u0011\u0001"+ + "\u0000\u0000\u0000\u017b\u017c\u0005e\u0000\u0000\u017c\u017d\u0005n\u0000"+ + "\u0000\u017d\u017e\u0005r\u0000\u0000\u017e\u017f\u0005i\u0000\u0000\u017f"+ + "\u0180\u0005c\u0000\u0000\u0180\u0181\u0005h\u0000\u0000\u0181\u0182\u0001"+ + "\u0000\u0000\u0000\u0182\u0183\u0006\u0002\u0002\u0000\u0183\u0013\u0001"+ + "\u0000\u0000\u0000\u0184\u0185\u0005e\u0000\u0000\u0185\u0186\u0005v\u0000"+ + "\u0000\u0186\u0187\u0005a\u0000\u0000\u0187\u0188\u0005l\u0000\u0000\u0188"+ + "\u0189\u0001\u0000\u0000\u0000\u0189\u018a\u0006\u0003\u0000\u0000\u018a"+ + "\u0015\u0001\u0000\u0000\u0000\u018b\u018c\u0005e\u0000\u0000\u018c\u018d"+ + "\u0005x\u0000\u0000\u018d\u018e\u0005p\u0000\u0000\u018e\u018f\u0005l"+ + "\u0000\u0000\u018f\u0190\u0005a\u0000\u0000\u0190\u0191\u0005i\u0000\u0000"+ + "\u0191\u0192\u0005n\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000\u0193"+ + "\u0194\u0006\u0004\u0003\u0000\u0194\u0017\u0001\u0000\u0000\u0000\u0195"+ + "\u0196\u0005f\u0000\u0000\u0196\u0197\u0005r\u0000\u0000\u0197\u0198\u0005"+ + "o\u0000\u0000\u0198\u0199\u0005m\u0000\u0000\u0199\u019a\u0001\u0000\u0000"+ + "\u0000\u019a\u019b\u0006\u0005\u0004\u0000\u019b\u0019\u0001\u0000\u0000"+ + 
"\u0000\u019c\u019d\u0005g\u0000\u0000\u019d\u019e\u0005r\u0000\u0000\u019e"+ + "\u019f\u0005o\u0000\u0000\u019f\u01a0\u0005k\u0000\u0000\u01a0\u01a1\u0001"+ + "\u0000\u0000\u0000\u01a1\u01a2\u0006\u0006\u0000\u0000\u01a2\u001b\u0001"+ + "\u0000\u0000\u0000\u01a3\u01a4\u0005i\u0000\u0000\u01a4\u01a5\u0005n\u0000"+ + "\u0000\u01a5\u01a6\u0005l\u0000\u0000\u01a6\u01a7\u0005i\u0000\u0000\u01a7"+ + "\u01a8\u0005n\u0000\u0000\u01a8\u01a9\u0005e\u0000\u0000\u01a9\u01aa\u0005"+ + "s\u0000\u0000\u01aa\u01ab\u0005t\u0000\u0000\u01ab\u01ac\u0005a\u0000"+ + "\u0000\u01ac\u01ad\u0005t\u0000\u0000\u01ad\u01ae\u0005s\u0000\u0000\u01ae"+ + "\u01af\u0001\u0000\u0000\u0000\u01af\u01b0\u0006\u0007\u0000\u0000\u01b0"+ + "\u001d\u0001\u0000\u0000\u0000\u01b1\u01b2\u0005k\u0000\u0000\u01b2\u01b3"+ + "\u0005e\u0000\u0000\u01b3\u01b4\u0005e\u0000\u0000\u01b4\u01b5\u0005p"+ + "\u0000\u0000\u01b5\u01b6\u0001\u0000\u0000\u0000\u01b6\u01b7\u0006\b\u0001"+ + "\u0000\u01b7\u001f\u0001\u0000\u0000\u0000\u01b8\u01b9\u0005l\u0000\u0000"+ + "\u01b9\u01ba\u0005i\u0000\u0000\u01ba\u01bb\u0005m\u0000\u0000\u01bb\u01bc"+ + "\u0005i\u0000\u0000\u01bc\u01bd\u0005t\u0000\u0000\u01bd\u01be\u0001\u0000"+ + "\u0000\u0000\u01be\u01bf\u0006\t\u0000\u0000\u01bf!\u0001\u0000\u0000"+ + "\u0000\u01c0\u01c1\u0005m\u0000\u0000\u01c1\u01c2\u0005e\u0000\u0000\u01c2"+ + "\u01c3\u0005t\u0000\u0000\u01c3\u01c4\u0005a\u0000\u0000\u01c4\u01c5\u0001"+ + "\u0000\u0000\u0000\u01c5\u01c6\u0006\n\u0005\u0000\u01c6#\u0001\u0000"+ + "\u0000\u0000\u01c7\u01c8\u0005m\u0000\u0000\u01c8\u01c9\u0005e\u0000\u0000"+ + "\u01c9\u01ca\u0005t\u0000\u0000\u01ca\u01cb\u0005r\u0000\u0000\u01cb\u01cc"+ + "\u0005i\u0000\u0000\u01cc\u01cd\u0005c\u0000\u0000\u01cd\u01ce\u0005s"+ + "\u0000\u0000\u01ce\u01cf\u0001\u0000\u0000\u0000\u01cf\u01d0\u0006\u000b"+ + "\u0006\u0000\u01d0%\u0001\u0000\u0000\u0000\u01d1\u01d2\u0005m\u0000\u0000"+ + "\u01d2\u01d3\u0005v\u0000\u0000\u01d3\u01d4\u0005_\u0000\u0000\u01d4\u01d5"+ + 
"\u0005e\u0000\u0000\u01d5\u01d6\u0005x\u0000\u0000\u01d6\u01d7\u0005p"+ + "\u0000\u0000\u01d7\u01d8\u0005a\u0000\u0000\u01d8\u01d9\u0005n\u0000\u0000"+ + "\u01d9\u01da\u0005d\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db"+ + "\u01dc\u0006\f\u0007\u0000\u01dc\'\u0001\u0000\u0000\u0000\u01dd\u01de"+ + "\u0005r\u0000\u0000\u01de\u01df\u0005e\u0000\u0000\u01df\u01e0\u0005n"+ + "\u0000\u0000\u01e0\u01e1\u0005a\u0000\u0000\u01e1\u01e2\u0005m\u0000\u0000"+ + "\u01e2\u01e3\u0005e\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000\u0000\u01e4"+ + "\u01e5\u0006\r\b\u0000\u01e5)\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005"+ + "r\u0000\u0000\u01e7\u01e8\u0005o\u0000\u0000\u01e8\u01e9\u0005w\u0000"+ + "\u0000\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea\u01eb\u0006\u000e\u0000"+ + "\u0000\u01eb+\u0001\u0000\u0000\u0000\u01ec\u01ed\u0005s\u0000\u0000\u01ed"+ + "\u01ee\u0005h\u0000\u0000\u01ee\u01ef\u0005o\u0000\u0000\u01ef\u01f0\u0005"+ + "w\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f2\u0006\u000f"+ + "\t\u0000\u01f2-\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005s\u0000\u0000"+ + "\u01f4\u01f5\u0005o\u0000\u0000\u01f5\u01f6\u0005r\u0000\u0000\u01f6\u01f7"+ + "\u0005t\u0000\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000\u01f8\u01f9\u0006"+ + "\u0010\u0000\u0000\u01f9/\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005s\u0000"+ + "\u0000\u01fb\u01fc\u0005t\u0000\u0000\u01fc\u01fd\u0005a\u0000\u0000\u01fd"+ + "\u01fe\u0005t\u0000\u0000\u01fe\u01ff\u0005s\u0000\u0000\u01ff\u0200\u0001"+ + "\u0000\u0000\u0000\u0200\u0201\u0006\u0011\u0000\u0000\u02011\u0001\u0000"+ + "\u0000\u0000\u0202\u0203\u0005w\u0000\u0000\u0203\u0204\u0005h\u0000\u0000"+ + "\u0204\u0205\u0005e\u0000\u0000\u0205\u0206\u0005r\u0000\u0000\u0206\u0207"+ + "\u0005e\u0000\u0000\u0207\u0208\u0001\u0000\u0000\u0000\u0208\u0209\u0006"+ + "\u0012\u0000\u0000\u02093\u0001\u0000\u0000\u0000\u020a\u020c\b\u0000"+ + "\u0000\u0000\u020b\u020a\u0001\u0000\u0000\u0000\u020c\u020d\u0001\u0000"+ + 
"\u0000\u0000\u020d\u020b\u0001\u0000\u0000\u0000\u020d\u020e\u0001\u0000"+ + "\u0000\u0000\u020e\u020f\u0001\u0000\u0000\u0000\u020f\u0210\u0006\u0013"+ + "\u0000\u0000\u02105\u0001\u0000\u0000\u0000\u0211\u0212\u0005/\u0000\u0000"+ + "\u0212\u0213\u0005/\u0000\u0000\u0213\u0217\u0001\u0000\u0000\u0000\u0214"+ + "\u0216\b\u0001\u0000\u0000\u0215\u0214\u0001\u0000\u0000\u0000\u0216\u0219"+ + "\u0001\u0000\u0000\u0000\u0217\u0215\u0001\u0000\u0000\u0000\u0217\u0218"+ + "\u0001\u0000\u0000\u0000\u0218\u021b\u0001\u0000\u0000\u0000\u0219\u0217"+ + "\u0001\u0000\u0000\u0000\u021a\u021c\u0005\r\u0000\u0000\u021b\u021a\u0001"+ + "\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c\u021e\u0001"+ + "\u0000\u0000\u0000\u021d\u021f\u0005\n\u0000\u0000\u021e\u021d\u0001\u0000"+ + "\u0000\u0000\u021e\u021f\u0001\u0000\u0000\u0000\u021f\u0220\u0001\u0000"+ + "\u0000\u0000\u0220\u0221\u0006\u0014\n\u0000\u02217\u0001\u0000\u0000"+ + "\u0000\u0222\u0223\u0005/\u0000\u0000\u0223\u0224\u0005*\u0000\u0000\u0224"+ + "\u0229\u0001\u0000\u0000\u0000\u0225\u0228\u00038\u0015\u0000\u0226\u0228"+ + "\t\u0000\u0000\u0000\u0227\u0225\u0001\u0000\u0000\u0000\u0227\u0226\u0001"+ + "\u0000\u0000\u0000\u0228\u022b\u0001\u0000\u0000\u0000\u0229\u022a\u0001"+ + "\u0000\u0000\u0000\u0229\u0227\u0001\u0000\u0000\u0000\u022a\u022c\u0001"+ + "\u0000\u0000\u0000\u022b\u0229\u0001\u0000\u0000\u0000\u022c\u022d\u0005"+ + "*\u0000\u0000\u022d\u022e\u0005/\u0000\u0000\u022e\u022f\u0001\u0000\u0000"+ + "\u0000\u022f\u0230\u0006\u0015\n\u0000\u02309\u0001\u0000\u0000\u0000"+ + "\u0231\u0233\u0007\u0002\u0000\u0000\u0232\u0231\u0001\u0000\u0000\u0000"+ + "\u0233\u0234\u0001\u0000\u0000\u0000\u0234\u0232\u0001\u0000\u0000\u0000"+ + "\u0234\u0235\u0001\u0000\u0000\u0000\u0235\u0236\u0001\u0000\u0000\u0000"+ + "\u0236\u0237\u0006\u0016\n\u0000\u0237;\u0001\u0000\u0000\u0000\u0238"+ + "\u023c\b\u0003\u0000\u0000\u0239\u023a\u0005/\u0000\u0000\u023a\u023c"+ + 
"\b\u0004\u0000\u0000\u023b\u0238\u0001\u0000\u0000\u0000\u023b\u0239\u0001"+ + "\u0000\u0000\u0000\u023c=\u0001\u0000\u0000\u0000\u023d\u023f\u0003<\u0017"+ + "\u0000\u023e\u023d\u0001\u0000\u0000\u0000\u023f\u0240\u0001\u0000\u0000"+ + "\u0000\u0240\u023e\u0001\u0000\u0000\u0000\u0240\u0241\u0001\u0000\u0000"+ + "\u0000\u0241?\u0001\u0000\u0000\u0000\u0242\u0243\u0003\u00acO\u0000\u0243"+ + "\u0244\u0001\u0000\u0000\u0000\u0244\u0245\u0006\u0019\u000b\u0000\u0245"+ + "\u0246\u0006\u0019\f\u0000\u0246A\u0001\u0000\u0000\u0000\u0247\u0248"+ + "\u0003J\u001e\u0000\u0248\u0249\u0001\u0000\u0000\u0000\u0249\u024a\u0006"+ + "\u001a\r\u0000\u024a\u024b\u0006\u001a\u000e\u0000\u024bC\u0001\u0000"+ + "\u0000\u0000\u024c\u024d\u0003:\u0016\u0000\u024d\u024e\u0001\u0000\u0000"+ + "\u0000\u024e\u024f\u0006\u001b\n\u0000\u024fE\u0001\u0000\u0000\u0000"+ + "\u0250\u0251\u00036\u0014\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252"+ + "\u0253\u0006\u001c\n\u0000\u0253G\u0001\u0000\u0000\u0000\u0254\u0255"+ + "\u00038\u0015\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0006"+ + "\u001d\n\u0000\u0257I\u0001\u0000\u0000\u0000\u0258\u0259\u0005|\u0000"+ + "\u0000\u0259\u025a\u0001\u0000\u0000\u0000\u025a\u025b\u0006\u001e\u000e"+ + "\u0000\u025bK\u0001\u0000\u0000\u0000\u025c\u025d\u0007\u0005\u0000\u0000"+ + "\u025dM\u0001\u0000\u0000\u0000\u025e\u025f\u0007\u0006\u0000\u0000\u025f"+ + "O\u0001\u0000\u0000\u0000\u0260\u0261\u0005\\\u0000\u0000\u0261\u0262"+ + "\u0007\u0007\u0000\u0000\u0262Q\u0001\u0000\u0000\u0000\u0263\u0264\b"+ + "\b\u0000\u0000\u0264S\u0001\u0000\u0000\u0000\u0265\u0267\u0007\t\u0000"+ + "\u0000\u0266\u0268\u0007\n\u0000\u0000\u0267\u0266\u0001\u0000\u0000\u0000"+ + "\u0267\u0268\u0001\u0000\u0000\u0000\u0268\u026a\u0001\u0000\u0000\u0000"+ + "\u0269\u026b\u0003L\u001f\u0000\u026a\u0269\u0001\u0000\u0000\u0000\u026b"+ + "\u026c\u0001\u0000\u0000\u0000\u026c\u026a\u0001\u0000\u0000\u0000\u026c"+ + 
"\u026d\u0001\u0000\u0000\u0000\u026dU\u0001\u0000\u0000\u0000\u026e\u026f"+ + "\u0005@\u0000\u0000\u026fW\u0001\u0000\u0000\u0000\u0270\u0271\u0005`"+ + "\u0000\u0000\u0271Y\u0001\u0000\u0000\u0000\u0272\u0276\b\u000b\u0000"+ + "\u0000\u0273\u0274\u0005`\u0000\u0000\u0274\u0276\u0005`\u0000\u0000\u0275"+ + "\u0272\u0001\u0000\u0000\u0000\u0275\u0273\u0001\u0000\u0000\u0000\u0276"+ + "[\u0001\u0000\u0000\u0000\u0277\u0278\u0005_\u0000\u0000\u0278]\u0001"+ + "\u0000\u0000\u0000\u0279\u027d\u0003N \u0000\u027a\u027d\u0003L\u001f"+ + "\u0000\u027b\u027d\u0003\\\'\u0000\u027c\u0279\u0001\u0000\u0000\u0000"+ + "\u027c\u027a\u0001\u0000\u0000\u0000\u027c\u027b\u0001\u0000\u0000\u0000"+ + "\u027d_\u0001\u0000\u0000\u0000\u027e\u0283\u0005\"\u0000\u0000\u027f"+ + "\u0282\u0003P!\u0000\u0280\u0282\u0003R\"\u0000\u0281\u027f\u0001\u0000"+ + "\u0000\u0000\u0281\u0280\u0001\u0000\u0000\u0000\u0282\u0285\u0001\u0000"+ + "\u0000\u0000\u0283\u0281\u0001\u0000\u0000\u0000\u0283\u0284\u0001\u0000"+ + "\u0000\u0000\u0284\u0286\u0001\u0000\u0000\u0000\u0285\u0283\u0001\u0000"+ + "\u0000\u0000\u0286\u029c\u0005\"\u0000\u0000\u0287\u0288\u0005\"\u0000"+ + "\u0000\u0288\u0289\u0005\"\u0000\u0000\u0289\u028a\u0005\"\u0000\u0000"+ + "\u028a\u028e\u0001\u0000\u0000\u0000\u028b\u028d\b\u0001\u0000\u0000\u028c"+ + "\u028b\u0001\u0000\u0000\u0000\u028d\u0290\u0001\u0000\u0000\u0000\u028e"+ + "\u028f\u0001\u0000\u0000\u0000\u028e\u028c\u0001\u0000\u0000\u0000\u028f"+ + "\u0291\u0001\u0000\u0000\u0000\u0290\u028e\u0001\u0000\u0000\u0000\u0291"+ + "\u0292\u0005\"\u0000\u0000\u0292\u0293\u0005\"\u0000\u0000\u0293\u0294"+ + "\u0005\"\u0000\u0000\u0294\u0296\u0001\u0000\u0000\u0000\u0295\u0297\u0005"+ + "\"\u0000\u0000\u0296\u0295\u0001\u0000\u0000\u0000\u0296\u0297\u0001\u0000"+ + "\u0000\u0000\u0297\u0299\u0001\u0000\u0000\u0000\u0298\u029a\u0005\"\u0000"+ + "\u0000\u0299\u0298\u0001\u0000\u0000\u0000\u0299\u029a\u0001\u0000\u0000"+ + 
"\u0000\u029a\u029c\u0001\u0000\u0000\u0000\u029b\u027e\u0001\u0000\u0000"+ + "\u0000\u029b\u0287\u0001\u0000\u0000\u0000\u029ca\u0001\u0000\u0000\u0000"+ + "\u029d\u029f\u0003L\u001f\u0000\u029e\u029d\u0001\u0000\u0000\u0000\u029f"+ + "\u02a0\u0001\u0000\u0000\u0000\u02a0\u029e\u0001\u0000\u0000\u0000\u02a0"+ + "\u02a1\u0001\u0000\u0000\u0000\u02a1c\u0001\u0000\u0000\u0000\u02a2\u02a4"+ + "\u0003L\u001f\u0000\u02a3\u02a2\u0001\u0000\u0000\u0000\u02a4\u02a5\u0001"+ + "\u0000\u0000\u0000\u02a5\u02a3\u0001\u0000\u0000\u0000\u02a5\u02a6\u0001"+ + "\u0000\u0000\u0000\u02a6\u02a7\u0001\u0000\u0000\u0000\u02a7\u02ab\u0003"+ + "t3\u0000\u02a8\u02aa\u0003L\u001f\u0000\u02a9\u02a8\u0001\u0000\u0000"+ + "\u0000\u02aa\u02ad\u0001\u0000\u0000\u0000\u02ab\u02a9\u0001\u0000\u0000"+ + "\u0000\u02ab\u02ac\u0001\u0000\u0000\u0000\u02ac\u02cd\u0001\u0000\u0000"+ + "\u0000\u02ad\u02ab\u0001\u0000\u0000\u0000\u02ae\u02b0\u0003t3\u0000\u02af"+ + "\u02b1\u0003L\u001f\u0000\u02b0\u02af\u0001\u0000\u0000\u0000\u02b1\u02b2"+ + "\u0001\u0000\u0000\u0000\u02b2\u02b0\u0001\u0000\u0000\u0000\u02b2\u02b3"+ + "\u0001\u0000\u0000\u0000\u02b3\u02cd\u0001\u0000\u0000\u0000\u02b4\u02b6"+ + "\u0003L\u001f\u0000\u02b5\u02b4\u0001\u0000\u0000\u0000\u02b6\u02b7\u0001"+ + "\u0000\u0000\u0000\u02b7\u02b5\u0001\u0000\u0000\u0000\u02b7\u02b8\u0001"+ + "\u0000\u0000\u0000\u02b8\u02c0\u0001\u0000\u0000\u0000\u02b9\u02bd\u0003"+ + "t3\u0000\u02ba\u02bc\u0003L\u001f\u0000\u02bb\u02ba\u0001\u0000\u0000"+ + "\u0000\u02bc\u02bf\u0001\u0000\u0000\u0000\u02bd\u02bb\u0001\u0000\u0000"+ + "\u0000\u02bd\u02be\u0001\u0000\u0000\u0000\u02be\u02c1\u0001\u0000\u0000"+ + "\u0000\u02bf\u02bd\u0001\u0000\u0000\u0000\u02c0\u02b9\u0001\u0000\u0000"+ + "\u0000\u02c0\u02c1\u0001\u0000\u0000\u0000\u02c1\u02c2\u0001\u0000\u0000"+ + "\u0000\u02c2\u02c3\u0003T#\u0000\u02c3\u02cd\u0001\u0000\u0000\u0000\u02c4"+ + "\u02c6\u0003t3\u0000\u02c5\u02c7\u0003L\u001f\u0000\u02c6\u02c5\u0001"+ + 
"\u0000\u0000\u0000\u02c7\u02c8\u0001\u0000\u0000\u0000\u02c8\u02c6\u0001"+ + "\u0000\u0000\u0000\u02c8\u02c9\u0001\u0000\u0000\u0000\u02c9\u02ca\u0001"+ + "\u0000\u0000\u0000\u02ca\u02cb\u0003T#\u0000\u02cb\u02cd\u0001\u0000\u0000"+ + "\u0000\u02cc\u02a3\u0001\u0000\u0000\u0000\u02cc\u02ae\u0001\u0000\u0000"+ + "\u0000\u02cc\u02b5\u0001\u0000\u0000\u0000\u02cc\u02c4\u0001\u0000\u0000"+ + "\u0000\u02cde\u0001\u0000\u0000\u0000\u02ce\u02cf\u0005b\u0000\u0000\u02cf"+ + "\u02d0\u0005y\u0000\u0000\u02d0g\u0001\u0000\u0000\u0000\u02d1\u02d2\u0005"+ + "a\u0000\u0000\u02d2\u02d3\u0005n\u0000\u0000\u02d3\u02d4\u0005d\u0000"+ + "\u0000\u02d4i\u0001\u0000\u0000\u0000\u02d5\u02d6\u0005a\u0000\u0000\u02d6"+ + "\u02d7\u0005s\u0000\u0000\u02d7\u02d8\u0005c\u0000\u0000\u02d8k\u0001"+ + "\u0000\u0000\u0000\u02d9\u02da\u0005=\u0000\u0000\u02dam\u0001\u0000\u0000"+ + "\u0000\u02db\u02dc\u0005:\u0000\u0000\u02dc\u02dd\u0005:\u0000\u0000\u02dd"+ + "o\u0001\u0000\u0000\u0000\u02de\u02df\u0005,\u0000\u0000\u02dfq\u0001"+ + "\u0000\u0000\u0000\u02e0\u02e1\u0005d\u0000\u0000\u02e1\u02e2\u0005e\u0000"+ + "\u0000\u02e2\u02e3\u0005s\u0000\u0000\u02e3\u02e4\u0005c\u0000\u0000\u02e4"+ + "s\u0001\u0000\u0000\u0000\u02e5\u02e6\u0005.\u0000\u0000\u02e6u\u0001"+ + "\u0000\u0000\u0000\u02e7\u02e8\u0005f\u0000\u0000\u02e8\u02e9\u0005a\u0000"+ + "\u0000\u02e9\u02ea\u0005l\u0000\u0000\u02ea\u02eb\u0005s\u0000\u0000\u02eb"+ + "\u02ec\u0005e\u0000\u0000\u02ecw\u0001\u0000\u0000\u0000\u02ed\u02ee\u0005"+ + "f\u0000\u0000\u02ee\u02ef\u0005i\u0000\u0000\u02ef\u02f0\u0005r\u0000"+ + "\u0000\u02f0\u02f1\u0005s\u0000\u0000\u02f1\u02f2\u0005t\u0000\u0000\u02f2"+ + "y\u0001\u0000\u0000\u0000\u02f3\u02f4\u0005l\u0000\u0000\u02f4\u02f5\u0005"+ + "a\u0000\u0000\u02f5\u02f6\u0005s\u0000\u0000\u02f6\u02f7\u0005t\u0000"+ + "\u0000\u02f7{\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005(\u0000\u0000\u02f9"+ + "}\u0001\u0000\u0000\u0000\u02fa\u02fb\u0005i\u0000\u0000\u02fb\u02fc\u0005"+ + 
"n\u0000\u0000\u02fc\u007f\u0001\u0000\u0000\u0000\u02fd\u02fe\u0005i\u0000"+ + "\u0000\u02fe\u02ff\u0005s\u0000\u0000\u02ff\u0081\u0001\u0000\u0000\u0000"+ + "\u0300\u0301\u0005l\u0000\u0000\u0301\u0302\u0005i\u0000\u0000\u0302\u0303"+ + "\u0005k\u0000\u0000\u0303\u0304\u0005e\u0000\u0000\u0304\u0083\u0001\u0000"+ + "\u0000\u0000\u0305\u0306\u0005n\u0000\u0000\u0306\u0307\u0005o\u0000\u0000"+ + "\u0307\u0308\u0005t\u0000\u0000\u0308\u0085\u0001\u0000\u0000\u0000\u0309"+ + "\u030a\u0005n\u0000\u0000\u030a\u030b\u0005u\u0000\u0000\u030b\u030c\u0005"+ + "l\u0000\u0000\u030c\u030d\u0005l\u0000\u0000\u030d\u0087\u0001\u0000\u0000"+ + "\u0000\u030e\u030f\u0005n\u0000\u0000\u030f\u0310\u0005u\u0000\u0000\u0310"+ + "\u0311\u0005l\u0000\u0000\u0311\u0312\u0005l\u0000\u0000\u0312\u0313\u0005"+ + "s\u0000\u0000\u0313\u0089\u0001\u0000\u0000\u0000\u0314\u0315\u0005o\u0000"+ + "\u0000\u0315\u0316\u0005r\u0000\u0000\u0316\u008b\u0001\u0000\u0000\u0000"+ + "\u0317\u0318\u0005?\u0000\u0000\u0318\u008d\u0001\u0000\u0000\u0000\u0319"+ + "\u031a\u0005r\u0000\u0000\u031a\u031b\u0005l\u0000\u0000\u031b\u031c\u0005"+ + "i\u0000\u0000\u031c\u031d\u0005k\u0000\u0000\u031d\u031e\u0005e\u0000"+ + "\u0000\u031e\u008f\u0001\u0000\u0000\u0000\u031f\u0320\u0005)\u0000\u0000"+ + "\u0320\u0091\u0001\u0000\u0000\u0000\u0321\u0322\u0005t\u0000\u0000\u0322"+ + "\u0323\u0005r\u0000\u0000\u0323\u0324\u0005u\u0000\u0000\u0324\u0325\u0005"+ + "e\u0000\u0000\u0325\u0093\u0001\u0000\u0000\u0000\u0326\u0327\u0005=\u0000"+ + "\u0000\u0327\u0328\u0005=\u0000\u0000\u0328\u0095\u0001\u0000\u0000\u0000"+ + "\u0329\u032a\u0005=\u0000\u0000\u032a\u032b\u0005~\u0000\u0000\u032b\u0097"+ + "\u0001\u0000\u0000\u0000\u032c\u032d\u0005!\u0000\u0000\u032d\u032e\u0005"+ + "=\u0000\u0000\u032e\u0099\u0001\u0000\u0000\u0000\u032f\u0330\u0005<\u0000"+ + "\u0000\u0330\u009b\u0001\u0000\u0000\u0000\u0331\u0332\u0005<\u0000\u0000"+ + "\u0332\u0333\u0005=\u0000\u0000\u0333\u009d\u0001\u0000\u0000\u0000\u0334"+ + 
"\u0335\u0005>\u0000\u0000\u0335\u009f\u0001\u0000\u0000\u0000\u0336\u0337"+ + "\u0005>\u0000\u0000\u0337\u0338\u0005=\u0000\u0000\u0338\u00a1\u0001\u0000"+ + "\u0000\u0000\u0339\u033a\u0005+\u0000\u0000\u033a\u00a3\u0001\u0000\u0000"+ + "\u0000\u033b\u033c\u0005-\u0000\u0000\u033c\u00a5\u0001\u0000\u0000\u0000"+ + "\u033d\u033e\u0005*\u0000\u0000\u033e\u00a7\u0001\u0000\u0000\u0000\u033f"+ + "\u0340\u0005/\u0000\u0000\u0340\u00a9\u0001\u0000\u0000\u0000\u0341\u0342"+ + "\u0005%\u0000\u0000\u0342\u00ab\u0001\u0000\u0000\u0000\u0343\u0344\u0005"+ + "[\u0000\u0000\u0344\u0345\u0001\u0000\u0000\u0000\u0345\u0346\u0006O\u0000"+ + "\u0000\u0346\u0347\u0006O\u0000\u0000\u0347\u00ad\u0001\u0000\u0000\u0000"+ + "\u0348\u0349\u0005]\u0000\u0000\u0349\u034a\u0001\u0000\u0000\u0000\u034a"+ + "\u034b\u0006P\u000e\u0000\u034b\u034c\u0006P\u000e\u0000\u034c\u00af\u0001"+ + "\u0000\u0000\u0000\u034d\u0351\u0003N \u0000\u034e\u0350\u0003^(\u0000"+ + "\u034f\u034e\u0001\u0000\u0000\u0000\u0350\u0353\u0001\u0000\u0000\u0000"+ + "\u0351\u034f\u0001\u0000\u0000\u0000\u0351\u0352\u0001\u0000\u0000\u0000"+ + "\u0352\u035e\u0001\u0000\u0000\u0000\u0353\u0351\u0001\u0000\u0000\u0000"+ + "\u0354\u0357\u0003\\\'\u0000\u0355\u0357\u0003V$\u0000\u0356\u0354\u0001"+ + "\u0000\u0000\u0000\u0356\u0355\u0001\u0000\u0000\u0000\u0357\u0359\u0001"+ + "\u0000\u0000\u0000\u0358\u035a\u0003^(\u0000\u0359\u0358\u0001\u0000\u0000"+ + "\u0000\u035a\u035b\u0001\u0000\u0000\u0000\u035b\u0359\u0001\u0000\u0000"+ + "\u0000\u035b\u035c\u0001\u0000\u0000\u0000\u035c\u035e\u0001\u0000\u0000"+ + "\u0000\u035d\u034d\u0001\u0000\u0000\u0000\u035d\u0356\u0001\u0000\u0000"+ + "\u0000\u035e\u00b1\u0001\u0000\u0000\u0000\u035f\u0361\u0003X%\u0000\u0360"+ + "\u0362\u0003Z&\u0000\u0361\u0360\u0001\u0000\u0000\u0000\u0362\u0363\u0001"+ + "\u0000\u0000\u0000\u0363\u0361\u0001\u0000\u0000\u0000\u0363\u0364\u0001"+ + "\u0000\u0000\u0000\u0364\u0365\u0001\u0000\u0000\u0000\u0365\u0366\u0003"+ + 
"X%\u0000\u0366\u00b3\u0001\u0000\u0000\u0000\u0367\u0368\u0003\u00b2R"+ + "\u0000\u0368\u00b5\u0001\u0000\u0000\u0000\u0369\u036a\u00036\u0014\u0000"+ + "\u036a\u036b\u0001\u0000\u0000\u0000\u036b\u036c\u0006T\n\u0000\u036c"+ + "\u00b7\u0001\u0000\u0000\u0000\u036d\u036e\u00038\u0015\u0000\u036e\u036f"+ + "\u0001\u0000\u0000\u0000\u036f\u0370\u0006U\n\u0000\u0370\u00b9\u0001"+ + "\u0000\u0000\u0000\u0371\u0372\u0003:\u0016\u0000\u0372\u0373\u0001\u0000"+ + "\u0000\u0000\u0373\u0374\u0006V\n\u0000\u0374\u00bb\u0001\u0000\u0000"+ + "\u0000\u0375\u0376\u0003J\u001e\u0000\u0376\u0377\u0001\u0000\u0000\u0000"+ + "\u0377\u0378\u0006W\r\u0000\u0378\u0379\u0006W\u000e\u0000\u0379\u00bd"+ + "\u0001\u0000\u0000\u0000\u037a\u037b\u0003\u00acO\u0000\u037b\u037c\u0001"+ + "\u0000\u0000\u0000\u037c\u037d\u0006X\u000b\u0000\u037d\u00bf\u0001\u0000"+ + "\u0000\u0000\u037e\u037f\u0003\u00aeP\u0000\u037f\u0380\u0001\u0000\u0000"+ + "\u0000\u0380\u0381\u0006Y\u000f\u0000\u0381\u00c1\u0001\u0000\u0000\u0000"+ + "\u0382\u0383\u0003p1\u0000\u0383\u0384\u0001\u0000\u0000\u0000\u0384\u0385"+ + "\u0006Z\u0010\u0000\u0385\u00c3\u0001\u0000\u0000\u0000\u0386\u0387\u0003"+ + "l/\u0000\u0387\u0388\u0001\u0000\u0000\u0000\u0388\u0389\u0006[\u0011"+ + "\u0000\u0389\u00c5\u0001\u0000\u0000\u0000\u038a\u038b\u0003`)\u0000\u038b"+ + "\u038c\u0001\u0000\u0000\u0000\u038c\u038d\u0006\\\u0012\u0000\u038d\u00c7"+ + "\u0001\u0000\u0000\u0000\u038e\u038f\u0005o\u0000\u0000\u038f\u0390\u0005"+ + "p\u0000\u0000\u0390\u0391\u0005t\u0000\u0000\u0391\u0392\u0005i\u0000"+ + "\u0000\u0392\u0393\u0005o\u0000\u0000\u0393\u0394\u0005n\u0000\u0000\u0394"+ + "\u0395\u0005s\u0000\u0000\u0395\u00c9\u0001\u0000\u0000\u0000\u0396\u0397"+ + "\u0005m\u0000\u0000\u0397\u0398\u0005e\u0000\u0000\u0398\u0399\u0005t"+ + "\u0000\u0000\u0399\u039a\u0005a\u0000\u0000\u039a\u039b\u0005d\u0000\u0000"+ + "\u039b\u039c\u0005a\u0000\u0000\u039c\u039d\u0005t\u0000\u0000\u039d\u039e"+ + 
"\u0005a\u0000\u0000\u039e\u00cb\u0001\u0000\u0000\u0000\u039f\u03a0\u0003"+ + ">\u0018\u0000\u03a0\u03a1\u0001\u0000\u0000\u0000\u03a1\u03a2\u0006_\u0013"+ + "\u0000\u03a2\u00cd\u0001\u0000\u0000\u0000\u03a3\u03a4\u00036\u0014\u0000"+ + "\u03a4\u03a5\u0001\u0000\u0000\u0000\u03a5\u03a6\u0006`\n\u0000\u03a6"+ + "\u00cf\u0001\u0000\u0000\u0000\u03a7\u03a8\u00038\u0015\u0000\u03a8\u03a9"+ + "\u0001\u0000\u0000\u0000\u03a9\u03aa\u0006a\n\u0000\u03aa\u00d1\u0001"+ + "\u0000\u0000\u0000\u03ab\u03ac\u0003:\u0016\u0000\u03ac\u03ad\u0001\u0000"+ + "\u0000\u0000\u03ad\u03ae\u0006b\n\u0000\u03ae\u00d3\u0001\u0000\u0000"+ + "\u0000\u03af\u03b0\u0003J\u001e\u0000\u03b0\u03b1\u0001\u0000\u0000\u0000"+ + "\u03b1\u03b2\u0006c\r\u0000\u03b2\u03b3\u0006c\u000e\u0000\u03b3\u00d5"+ + "\u0001\u0000\u0000\u0000\u03b4\u03b5\u0003t3\u0000\u03b5\u03b6\u0001\u0000"+ + "\u0000\u0000\u03b6\u03b7\u0006d\u0014\u0000\u03b7\u00d7\u0001\u0000\u0000"+ + "\u0000\u03b8\u03b9\u0003p1\u0000\u03b9\u03ba\u0001\u0000\u0000\u0000\u03ba"+ + "\u03bb\u0006e\u0010\u0000\u03bb\u00d9\u0001\u0000\u0000\u0000\u03bc\u03c1"+ + "\u0003N \u0000\u03bd\u03c1\u0003L\u001f\u0000\u03be\u03c1\u0003\\\'\u0000"+ + "\u03bf\u03c1\u0003\u00a6L\u0000\u03c0\u03bc\u0001\u0000\u0000\u0000\u03c0"+ + "\u03bd\u0001\u0000\u0000\u0000\u03c0\u03be\u0001\u0000\u0000\u0000\u03c0"+ + "\u03bf\u0001\u0000\u0000\u0000\u03c1\u00db\u0001\u0000\u0000\u0000\u03c2"+ + "\u03c5\u0003N \u0000\u03c3\u03c5\u0003\u00a6L\u0000\u03c4\u03c2\u0001"+ + "\u0000\u0000\u0000\u03c4\u03c3\u0001\u0000\u0000\u0000\u03c5\u03c9\u0001"+ + "\u0000\u0000\u0000\u03c6\u03c8\u0003\u00daf\u0000\u03c7\u03c6\u0001\u0000"+ + "\u0000\u0000\u03c8\u03cb\u0001\u0000\u0000\u0000\u03c9\u03c7\u0001\u0000"+ + "\u0000\u0000\u03c9\u03ca\u0001\u0000\u0000\u0000\u03ca\u03d6\u0001\u0000"+ + "\u0000\u0000\u03cb\u03c9\u0001\u0000\u0000\u0000\u03cc\u03cf\u0003\\\'"+ + "\u0000\u03cd\u03cf\u0003V$\u0000\u03ce\u03cc\u0001\u0000\u0000\u0000\u03ce"+ + 
"\u03cd\u0001\u0000\u0000\u0000\u03cf\u03d1\u0001\u0000\u0000\u0000\u03d0"+ + "\u03d2\u0003\u00daf\u0000\u03d1\u03d0\u0001\u0000\u0000\u0000\u03d2\u03d3"+ + "\u0001\u0000\u0000\u0000\u03d3\u03d1\u0001\u0000\u0000\u0000\u03d3\u03d4"+ + "\u0001\u0000\u0000\u0000\u03d4\u03d6\u0001\u0000\u0000\u0000\u03d5\u03c4"+ + "\u0001\u0000\u0000\u0000\u03d5\u03ce\u0001\u0000\u0000\u0000\u03d6\u00dd"+ + "\u0001\u0000\u0000\u0000\u03d7\u03da\u0003\u00dcg\u0000\u03d8\u03da\u0003"+ + "\u00b2R\u0000\u03d9\u03d7\u0001\u0000\u0000\u0000\u03d9\u03d8\u0001\u0000"+ + "\u0000\u0000\u03da\u03db\u0001\u0000\u0000\u0000\u03db\u03d9\u0001\u0000"+ + "\u0000\u0000\u03db\u03dc\u0001\u0000\u0000\u0000\u03dc\u00df\u0001\u0000"+ + "\u0000\u0000\u03dd\u03de\u00036\u0014\u0000\u03de\u03df\u0001\u0000\u0000"+ + "\u0000\u03df\u03e0\u0006i\n\u0000\u03e0\u00e1\u0001\u0000\u0000\u0000"+ + "\u03e1\u03e2\u00038\u0015\u0000\u03e2\u03e3\u0001\u0000\u0000\u0000\u03e3"+ + "\u03e4\u0006j\n\u0000\u03e4\u00e3\u0001\u0000\u0000\u0000\u03e5\u03e6"+ + "\u0003:\u0016\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7\u03e8\u0006"+ + "k\n\u0000\u03e8\u00e5\u0001\u0000\u0000\u0000\u03e9\u03ea\u0003J\u001e"+ + "\u0000\u03ea\u03eb\u0001\u0000\u0000\u0000\u03eb\u03ec\u0006l\r\u0000"+ + "\u03ec\u03ed\u0006l\u000e\u0000\u03ed\u00e7\u0001\u0000\u0000\u0000\u03ee"+ + "\u03ef\u0003l/\u0000\u03ef\u03f0\u0001\u0000\u0000\u0000\u03f0\u03f1\u0006"+ + "m\u0011\u0000\u03f1\u00e9\u0001\u0000\u0000\u0000\u03f2\u03f3\u0003p1"+ + "\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006n\u0010\u0000"+ + "\u03f5\u00eb\u0001\u0000\u0000\u0000\u03f6\u03f7\u0003t3\u0000\u03f7\u03f8"+ + "\u0001\u0000\u0000\u0000\u03f8\u03f9\u0006o\u0014\u0000\u03f9\u00ed\u0001"+ + "\u0000\u0000\u0000\u03fa\u03fb\u0005a\u0000\u0000\u03fb\u03fc\u0005s\u0000"+ + "\u0000\u03fc\u00ef\u0001\u0000\u0000\u0000\u03fd\u03fe\u0003\u00deh\u0000"+ + "\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff\u0400\u0006q\u0015\u0000\u0400"+ + 
"\u00f1\u0001\u0000\u0000\u0000\u0401\u0402\u00036\u0014\u0000\u0402\u0403"+ + "\u0001\u0000\u0000\u0000\u0403\u0404\u0006r\n\u0000\u0404\u00f3\u0001"+ + "\u0000\u0000\u0000\u0405\u0406\u00038\u0015\u0000\u0406\u0407\u0001\u0000"+ + "\u0000\u0000\u0407\u0408\u0006s\n\u0000\u0408\u00f5\u0001\u0000\u0000"+ + "\u0000\u0409\u040a\u0003:\u0016\u0000\u040a\u040b\u0001\u0000\u0000\u0000"+ + "\u040b\u040c\u0006t\n\u0000\u040c\u00f7\u0001\u0000\u0000\u0000\u040d"+ + "\u040e\u0003J\u001e\u0000\u040e\u040f\u0001\u0000\u0000\u0000\u040f\u0410"+ + "\u0006u\r\u0000\u0410\u0411\u0006u\u000e\u0000\u0411\u00f9\u0001\u0000"+ + "\u0000\u0000\u0412\u0413\u0003\u00acO\u0000\u0413\u0414\u0001\u0000\u0000"+ + "\u0000\u0414\u0415\u0006v\u000b\u0000\u0415\u0416\u0006v\u0016\u0000\u0416"+ + "\u00fb\u0001\u0000\u0000\u0000\u0417\u0418\u0005o\u0000\u0000\u0418\u0419"+ + "\u0005n\u0000\u0000\u0419\u041a\u0001\u0000\u0000\u0000\u041a\u041b\u0006"+ + "w\u0017\u0000\u041b\u00fd\u0001\u0000\u0000\u0000\u041c\u041d\u0005w\u0000"+ + "\u0000\u041d\u041e\u0005i\u0000\u0000\u041e\u041f\u0005t\u0000\u0000\u041f"+ + "\u0420\u0005h\u0000\u0000\u0420\u0421\u0001\u0000\u0000\u0000\u0421\u0422"+ + "\u0006x\u0017\u0000\u0422\u00ff\u0001\u0000\u0000\u0000\u0423\u0424\b"+ + "\f\u0000\u0000\u0424\u0101\u0001\u0000\u0000\u0000\u0425\u0427\u0003\u0100"+ + "y\u0000\u0426\u0425\u0001\u0000\u0000\u0000\u0427\u0428\u0001\u0000\u0000"+ + "\u0000\u0428\u0426\u0001\u0000\u0000\u0000\u0428\u0429\u0001\u0000\u0000"+ + "\u0000\u0429\u042a\u0001\u0000\u0000\u0000\u042a\u042b\u0003\u0146\u009c"+ + "\u0000\u042b\u042d\u0001\u0000\u0000\u0000\u042c\u0426\u0001\u0000\u0000"+ + "\u0000\u042c\u042d\u0001\u0000\u0000\u0000\u042d\u042f\u0001\u0000\u0000"+ + "\u0000\u042e\u0430\u0003\u0100y\u0000\u042f\u042e\u0001\u0000\u0000\u0000"+ + "\u0430\u0431\u0001\u0000\u0000\u0000\u0431\u042f\u0001\u0000\u0000\u0000"+ + "\u0431\u0432\u0001\u0000\u0000\u0000\u0432\u0103\u0001\u0000\u0000\u0000"+ + 
"\u0433\u0434\u0003\u00b4S\u0000\u0434\u0435\u0001\u0000\u0000\u0000\u0435"+ + "\u0436\u0006{\u0018\u0000\u0436\u0105\u0001\u0000\u0000\u0000\u0437\u0438"+ + "\u0003\u0102z\u0000\u0438\u0439\u0001\u0000\u0000\u0000\u0439\u043a\u0006"+ + "|\u0019\u0000\u043a\u0107\u0001\u0000\u0000\u0000\u043b\u043c\u00036\u0014"+ + "\u0000\u043c\u043d\u0001\u0000\u0000\u0000\u043d\u043e\u0006}\n\u0000"+ + "\u043e\u0109\u0001\u0000\u0000\u0000\u043f\u0440\u00038\u0015\u0000\u0440"+ + "\u0441\u0001\u0000\u0000\u0000\u0441\u0442\u0006~\n\u0000\u0442\u010b"+ + "\u0001\u0000\u0000\u0000\u0443\u0444\u0003:\u0016\u0000\u0444\u0445\u0001"+ + "\u0000\u0000\u0000\u0445\u0446\u0006\u007f\n\u0000\u0446\u010d\u0001\u0000"+ + "\u0000\u0000\u0447\u0448\u0003J\u001e\u0000\u0448\u0449\u0001\u0000\u0000"+ + "\u0000\u0449\u044a\u0006\u0080\r\u0000\u044a\u044b\u0006\u0080\u000e\u0000"+ + "\u044b\u044c\u0006\u0080\u000e\u0000\u044c\u010f\u0001\u0000\u0000\u0000"+ + "\u044d\u044e\u0003l/\u0000\u044e\u044f\u0001\u0000\u0000\u0000\u044f\u0450"+ + "\u0006\u0081\u0011\u0000\u0450\u0111\u0001\u0000\u0000\u0000\u0451\u0452"+ + "\u0003p1\u0000\u0452\u0453\u0001\u0000\u0000\u0000\u0453\u0454\u0006\u0082"+ + "\u0010\u0000\u0454\u0113\u0001\u0000\u0000\u0000\u0455\u0456\u0003t3\u0000"+ + "\u0456\u0457\u0001\u0000\u0000\u0000\u0457\u0458\u0006\u0083\u0014\u0000"+ + "\u0458\u0115\u0001\u0000\u0000\u0000\u0459\u045a\u0003\u00fex\u0000\u045a"+ + "\u045b\u0001\u0000\u0000\u0000\u045b\u045c\u0006\u0084\u001a\u0000\u045c"+ + "\u0117\u0001\u0000\u0000\u0000\u045d\u045e\u0003\u00deh\u0000\u045e\u045f"+ + "\u0001\u0000\u0000\u0000\u045f\u0460\u0006\u0085\u0015\u0000\u0460\u0119"+ + "\u0001\u0000\u0000\u0000\u0461\u0462\u0003\u00b4S\u0000\u0462\u0463\u0001"+ + "\u0000\u0000\u0000\u0463\u0464\u0006\u0086\u0018\u0000\u0464\u011b\u0001"+ + "\u0000\u0000\u0000\u0465\u0466\u00036\u0014\u0000\u0466\u0467\u0001\u0000"+ + "\u0000\u0000\u0467\u0468\u0006\u0087\n\u0000\u0468\u011d\u0001\u0000\u0000"+ + 
"\u0000\u0469\u046a\u00038\u0015\u0000\u046a\u046b\u0001\u0000\u0000\u0000"+ + "\u046b\u046c\u0006\u0088\n\u0000\u046c\u011f\u0001\u0000\u0000\u0000\u046d"+ + "\u046e\u0003:\u0016\u0000\u046e\u046f\u0001\u0000\u0000\u0000\u046f\u0470"+ + "\u0006\u0089\n\u0000\u0470\u0121\u0001\u0000\u0000\u0000\u0471\u0472\u0003"+ + "J\u001e\u0000\u0472\u0473\u0001\u0000\u0000\u0000\u0473\u0474\u0006\u008a"+ + "\r\u0000\u0474\u0475\u0006\u008a\u000e\u0000\u0475\u0123\u0001\u0000\u0000"+ + "\u0000\u0476\u0477\u0003t3\u0000\u0477\u0478\u0001\u0000\u0000\u0000\u0478"+ + "\u0479\u0006\u008b\u0014\u0000\u0479\u0125\u0001\u0000\u0000\u0000\u047a"+ + "\u047b\u0003\u00b4S\u0000\u047b\u047c\u0001\u0000\u0000\u0000\u047c\u047d"+ + "\u0006\u008c\u0018\u0000\u047d\u0127\u0001\u0000\u0000\u0000\u047e\u047f"+ + "\u0003\u00b0Q\u0000\u047f\u0480\u0001\u0000\u0000\u0000\u0480\u0481\u0006"+ + "\u008d\u001b\u0000\u0481\u0129\u0001\u0000\u0000\u0000\u0482\u0483\u0003"+ + "6\u0014\u0000\u0483\u0484\u0001\u0000\u0000\u0000\u0484\u0485\u0006\u008e"+ + "\n\u0000\u0485\u012b\u0001\u0000\u0000\u0000\u0486\u0487\u00038\u0015"+ + "\u0000\u0487\u0488\u0001\u0000\u0000\u0000\u0488\u0489\u0006\u008f\n\u0000"+ + "\u0489\u012d\u0001\u0000\u0000\u0000\u048a\u048b\u0003:\u0016\u0000\u048b"+ + "\u048c\u0001\u0000\u0000\u0000\u048c\u048d\u0006\u0090\n\u0000\u048d\u012f"+ + "\u0001\u0000\u0000\u0000\u048e\u048f\u0003J\u001e\u0000\u048f\u0490\u0001"+ + "\u0000\u0000\u0000\u0490\u0491\u0006\u0091\r\u0000\u0491\u0492\u0006\u0091"+ + "\u000e\u0000\u0492\u0131\u0001\u0000\u0000\u0000\u0493\u0494\u0005i\u0000"+ + "\u0000\u0494\u0495\u0005n\u0000\u0000\u0495\u0496\u0005f\u0000\u0000\u0496"+ + "\u0497\u0005o\u0000\u0000\u0497\u0133\u0001\u0000\u0000\u0000\u0498\u0499"+ + "\u00036\u0014\u0000\u0499\u049a\u0001\u0000\u0000\u0000\u049a\u049b\u0006"+ + "\u0093\n\u0000\u049b\u0135\u0001\u0000\u0000\u0000\u049c\u049d\u00038"+ + "\u0015\u0000\u049d\u049e\u0001\u0000\u0000\u0000\u049e\u049f\u0006\u0094"+ + 
"\n\u0000\u049f\u0137\u0001\u0000\u0000\u0000\u04a0\u04a1\u0003:\u0016"+ + "\u0000\u04a1\u04a2\u0001\u0000\u0000\u0000\u04a2\u04a3\u0006\u0095\n\u0000"+ + "\u04a3\u0139\u0001\u0000\u0000\u0000\u04a4\u04a5\u0003J\u001e\u0000\u04a5"+ + "\u04a6\u0001\u0000\u0000\u0000\u04a6\u04a7\u0006\u0096\r\u0000\u04a7\u04a8"+ + "\u0006\u0096\u000e\u0000\u04a8\u013b\u0001\u0000\u0000\u0000\u04a9\u04aa"+ + "\u0005f\u0000\u0000\u04aa\u04ab\u0005u\u0000\u0000\u04ab\u04ac\u0005n"+ + "\u0000\u0000\u04ac\u04ad\u0005c\u0000\u0000\u04ad\u04ae\u0005t\u0000\u0000"+ + "\u04ae\u04af\u0005i\u0000\u0000\u04af\u04b0\u0005o\u0000\u0000\u04b0\u04b1"+ + "\u0005n\u0000\u0000\u04b1\u04b2\u0005s\u0000\u0000\u04b2\u013d\u0001\u0000"+ + "\u0000\u0000\u04b3\u04b4\u00036\u0014\u0000\u04b4\u04b5\u0001\u0000\u0000"+ + "\u0000\u04b5\u04b6\u0006\u0098\n\u0000\u04b6\u013f\u0001\u0000\u0000\u0000"+ + "\u04b7\u04b8\u00038\u0015\u0000\u04b8\u04b9\u0001\u0000\u0000\u0000\u04b9"+ + "\u04ba\u0006\u0099\n\u0000\u04ba\u0141\u0001\u0000\u0000\u0000\u04bb\u04bc"+ + "\u0003:\u0016\u0000\u04bc\u04bd\u0001\u0000\u0000\u0000\u04bd\u04be\u0006"+ + "\u009a\n\u0000\u04be\u0143\u0001\u0000\u0000\u0000\u04bf\u04c0\u0003\u00ae"+ + "P\u0000\u04c0\u04c1\u0001\u0000\u0000\u0000\u04c1\u04c2\u0006\u009b\u000f"+ + "\u0000\u04c2\u04c3\u0006\u009b\u000e\u0000\u04c3\u0145\u0001\u0000\u0000"+ + "\u0000\u04c4\u04c5\u0005:\u0000\u0000\u04c5\u0147\u0001\u0000\u0000\u0000"+ + "\u04c6\u04cc\u0003V$\u0000\u04c7\u04cc\u0003L\u001f\u0000\u04c8\u04cc"+ + "\u0003t3\u0000\u04c9\u04cc\u0003N \u0000\u04ca\u04cc\u0003\\\'\u0000\u04cb"+ + "\u04c6\u0001\u0000\u0000\u0000\u04cb\u04c7\u0001\u0000\u0000\u0000\u04cb"+ + "\u04c8\u0001\u0000\u0000\u0000\u04cb\u04c9\u0001\u0000\u0000\u0000\u04cb"+ + "\u04ca\u0001\u0000\u0000\u0000\u04cc\u04cd\u0001\u0000\u0000\u0000\u04cd"+ + "\u04cb\u0001\u0000\u0000\u0000\u04cd\u04ce\u0001\u0000\u0000\u0000\u04ce"+ + "\u0149\u0001\u0000\u0000\u0000\u04cf\u04d0\u00036\u0014\u0000\u04d0\u04d1"+ + 
"\u0001\u0000\u0000\u0000\u04d1\u04d2\u0006\u009e\n\u0000\u04d2\u014b\u0001"+ + "\u0000\u0000\u0000\u04d3\u04d4\u00038\u0015\u0000\u04d4\u04d5\u0001\u0000"+ + "\u0000\u0000\u04d5\u04d6\u0006\u009f\n\u0000\u04d6\u014d\u0001\u0000\u0000"+ + "\u0000\u04d7\u04d8\u0003:\u0016\u0000\u04d8\u04d9\u0001\u0000\u0000\u0000"+ + "\u04d9\u04da\u0006\u00a0\n\u0000\u04da\u014f\u0001\u0000\u0000\u0000\u04db"+ + "\u04dc\u0003J\u001e\u0000\u04dc\u04dd\u0001\u0000\u0000\u0000\u04dd\u04de"+ + "\u0006\u00a1\r\u0000\u04de\u04df\u0006\u00a1\u000e\u0000\u04df\u0151\u0001"+ + "\u0000\u0000\u0000\u04e0\u04e1\u0003>\u0018\u0000\u04e1\u04e2\u0001\u0000"+ + "\u0000\u0000\u04e2\u04e3\u0006\u00a2\u0013\u0000\u04e3\u04e4\u0006\u00a2"+ + "\u000e\u0000\u04e4\u04e5\u0006\u00a2\u001c\u0000\u04e5\u0153\u0001\u0000"+ + "\u0000\u0000\u04e6\u04e7\u00036\u0014\u0000\u04e7\u04e8\u0001\u0000\u0000"+ + "\u0000\u04e8\u04e9\u0006\u00a3\n\u0000\u04e9\u0155\u0001\u0000\u0000\u0000"+ + "\u04ea\u04eb\u00038\u0015\u0000\u04eb\u04ec\u0001\u0000\u0000\u0000\u04ec"+ + "\u04ed\u0006\u00a4\n\u0000\u04ed\u0157\u0001\u0000\u0000\u0000\u04ee\u04ef"+ + "\u0003:\u0016\u0000\u04ef\u04f0\u0001\u0000\u0000\u0000\u04f0\u04f1\u0006"+ + "\u00a5\n\u0000\u04f1\u0159\u0001\u0000\u0000\u0000\u04f2\u04f3\u0003p"+ + "1\u0000\u04f3\u04f4\u0001\u0000\u0000\u0000\u04f4\u04f5\u0006\u00a6\u0010"+ + "\u0000\u04f5\u04f6\u0006\u00a6\u000e\u0000\u04f6\u04f7\u0006\u00a6\u0006"+ + "\u0000\u04f7\u015b\u0001\u0000\u0000\u0000\u04f8\u04f9\u00036\u0014\u0000"+ + "\u04f9\u04fa\u0001\u0000\u0000\u0000\u04fa\u04fb\u0006\u00a7\n\u0000\u04fb"+ + "\u015d\u0001\u0000\u0000\u0000\u04fc\u04fd\u00038\u0015\u0000\u04fd\u04fe"+ + "\u0001\u0000\u0000\u0000\u04fe\u04ff\u0006\u00a8\n\u0000\u04ff\u015f\u0001"+ + "\u0000\u0000\u0000\u0500\u0501\u0003:\u0016\u0000\u0501\u0502\u0001\u0000"+ + "\u0000\u0000\u0502\u0503\u0006\u00a9\n\u0000\u0503\u0161\u0001\u0000\u0000"+ + "\u0000\u0504\u0505\u0003\u00b4S\u0000\u0505\u0506\u0001\u0000\u0000\u0000"+ + 
"\u0506\u0507\u0006\u00aa\u000e\u0000\u0507\u0508\u0006\u00aa\u0000\u0000"+ + "\u0508\u0509\u0006\u00aa\u0018\u0000\u0509\u0163\u0001\u0000\u0000\u0000"+ + "\u050a\u050b\u0003\u00b0Q\u0000\u050b\u050c\u0001\u0000\u0000\u0000\u050c"+ + "\u050d\u0006\u00ab\u000e\u0000\u050d\u050e\u0006\u00ab\u0000\u0000\u050e"+ + "\u050f\u0006\u00ab\u001b\u0000\u050f\u0165\u0001\u0000\u0000\u0000\u0510"+ + "\u0511\u0003f,\u0000\u0511\u0512\u0001\u0000\u0000\u0000\u0512\u0513\u0006"+ + "\u00ac\u000e\u0000\u0513\u0514\u0006\u00ac\u0000\u0000\u0514\u0515\u0006"+ + "\u00ac\u001d\u0000\u0515\u0167\u0001\u0000\u0000\u0000\u0516\u0517\u0003"+ + "J\u001e\u0000\u0517\u0518\u0001\u0000\u0000\u0000\u0518\u0519\u0006\u00ad"+ + "\r\u0000\u0519\u051a\u0006\u00ad\u000e\u0000\u051a\u0169\u0001\u0000\u0000"+ + "\u0000<\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f"+ + "\r\u020d\u0217\u021b\u021e\u0227\u0229\u0234\u023b\u0240\u0267\u026c\u0275"+ + "\u027c\u0281\u0283\u028e\u0296\u0299\u029b\u02a0\u02a5\u02ab\u02b2\u02b7"+ + "\u02bd\u02c0\u02c8\u02cc\u0351\u0356\u035b\u035d\u0363\u03c0\u03c4\u03c9"+ + "\u03ce\u03d3\u03d5\u03d9\u03db\u0428\u042c\u0431\u04cb\u04cd\u001e\u0005"+ + "\u0002\u0000\u0005\u0004\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005"+ + "\u0003\u0000\u0005\n\u0000\u0005\f\u0000\u0005\b\u0000\u0005\u0005\u0000"+ + "\u0005\t\u0000\u0000\u0001\u0000\u0007C\u0000\u0005\u0000\u0000\u0007"+ + "\u001c\u0000\u0004\u0000\u0000\u0007D\u0000\u0007%\u0000\u0007#\u0000"+ + "\u0007\u001d\u0000\u0007\u0018\u0000\u0007\'\u0000\u0007O\u0000\u0005"+ + "\u000b\u0000\u0005\u0007\u0000\u0007F\u0000\u0007Y\u0000\u0007X\u0000"+ + "\u0007E\u0000\u0005\r\u0000\u0007 \u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 
2b887065985d..461605d5f023 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -11,6 +11,7 @@ null 'keep' 'limit' 'meta' +'metrics' 'mv_expand' 'rename' 'row' @@ -25,6 +26,7 @@ null null null null +null '|' null null @@ -80,7 +82,6 @@ null null null null -null 'as' null null @@ -110,6 +111,12 @@ null null null null +null +null +null +null +null +null token symbolic names: null @@ -124,6 +131,7 @@ INLINESTATS KEEP LIMIT META +METRICS MV_EXPAND RENAME ROW @@ -135,6 +143,7 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS +INDEX_UNQUOTED_IDENTIFIER EXPLAIN_WS EXPLAIN_LINE_COMMENT EXPLAIN_MULTILINE_COMMENT @@ -186,7 +195,6 @@ EXPR_MULTILINE_COMMENT EXPR_WS OPTIONS METADATA -FROM_UNQUOTED_IDENTIFIER FROM_LINE_COMMENT FROM_MULTILINE_COMMENT FROM_WS @@ -223,6 +231,12 @@ SETTING SETTING_LINE_COMMENT SETTTING_MULTILINE_COMMENT SETTING_WS +METRICS_LINE_COMMENT +METRICS_MULTILINE_COMMENT +METRICS_WS +CLOSING_METRICS_LINE_COMMENT +CLOSING_METRICS_MULTILINE_COMMENT +CLOSING_METRICS_WS rule names: singleStatement @@ -241,12 +255,13 @@ rowCommand fields field fromCommand -fromIdentifier +indexIdentifier fromOptions configOption metadata metadataOption deprecated_metadata +metricsCommand evalCommand statsCommand inlinestatsCommand @@ -282,4 +297,4 @@ enrichWithClause atn: -[4, 1, 110, 543, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 
2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 118, 8, 1, 10, 1, 12, 1, 121, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 128, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 143, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 155, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 162, 8, 5, 10, 5, 12, 5, 165, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 172, 8, 5, 1, 5, 1, 5, 3, 5, 176, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 184, 8, 5, 10, 5, 12, 5, 187, 9, 5, 1, 6, 1, 6, 3, 6, 191, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 198, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 203, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 210, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 216, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 224, 8, 8, 10, 8, 12, 8, 227, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 237, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 242, 8, 9, 10, 9, 12, 9, 245, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 253, 8, 10, 10, 10, 12, 10, 256, 9, 10, 3, 10, 258, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 270, 8, 13, 10, 13, 12, 13, 273, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 280, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 286, 8, 15, 10, 15, 12, 15, 289, 9, 15, 1, 15, 3, 15, 292, 8, 15, 1, 15, 3, 15, 295, 8, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 303, 8, 17, 10, 17, 12, 17, 306, 9, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 314, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 320, 8, 20, 10, 20, 12, 20, 323, 9, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 3, 23, 334, 8, 23, 1, 23, 1, 23, 3, 23, 338, 8, 23, 1, 24, 1, 24, 1, 24, 1, 24, 3, 24, 344, 8, 24, 1, 25, 1, 25, 1, 25, 5, 25, 349, 8, 25, 10, 25, 12, 25, 352, 9, 25, 1, 26, 1, 26, 
1, 26, 5, 26, 357, 8, 26, 10, 26, 12, 26, 360, 9, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 379, 8, 29, 10, 29, 12, 29, 382, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 390, 8, 29, 10, 29, 12, 29, 393, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 401, 8, 29, 10, 29, 12, 29, 404, 9, 29, 1, 29, 1, 29, 3, 29, 408, 8, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 417, 8, 31, 10, 31, 12, 31, 420, 9, 31, 1, 32, 1, 32, 3, 32, 424, 8, 32, 1, 32, 1, 32, 3, 32, 428, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 434, 8, 33, 10, 33, 12, 33, 437, 9, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 443, 8, 34, 10, 34, 12, 34, 446, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 452, 8, 35, 10, 35, 12, 35, 455, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 465, 8, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 5, 40, 477, 8, 40, 10, 40, 12, 40, 480, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 3, 43, 490, 8, 43, 1, 44, 3, 44, 493, 8, 44, 1, 44, 1, 44, 1, 45, 3, 45, 498, 8, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 3, 52, 523, 8, 52, 1, 52, 1, 52, 1, 52, 1, 52, 5, 52, 529, 8, 52, 10, 52, 12, 52, 532, 9, 52, 3, 52, 534, 8, 52, 1, 53, 1, 53, 1, 53, 3, 53, 539, 8, 53, 1, 53, 1, 53, 1, 53, 0, 4, 2, 10, 16, 18, 54, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 0, 7, 1, 0, 60, 61, 1, 0, 62, 64, 1, 0, 67, 68, 2, 0, 32, 32, 36, 36, 1, 0, 39, 40, 2, 0, 38, 38, 52, 52, 2, 0, 53, 53, 55, 59, 568, 0, 108, 1, 0, 0, 0, 2, 111, 1, 0, 0, 0, 4, 127, 1, 0, 0, 0, 6, 142, 1, 0, 0, 0, 8, 144, 1, 0, 0, 0, 10, 175, 1, 0, 0, 0, 12, 202, 1, 0, 0, 
0, 14, 209, 1, 0, 0, 0, 16, 215, 1, 0, 0, 0, 18, 236, 1, 0, 0, 0, 20, 246, 1, 0, 0, 0, 22, 261, 1, 0, 0, 0, 24, 263, 1, 0, 0, 0, 26, 266, 1, 0, 0, 0, 28, 279, 1, 0, 0, 0, 30, 281, 1, 0, 0, 0, 32, 296, 1, 0, 0, 0, 34, 298, 1, 0, 0, 0, 36, 307, 1, 0, 0, 0, 38, 313, 1, 0, 0, 0, 40, 315, 1, 0, 0, 0, 42, 324, 1, 0, 0, 0, 44, 328, 1, 0, 0, 0, 46, 331, 1, 0, 0, 0, 48, 339, 1, 0, 0, 0, 50, 345, 1, 0, 0, 0, 52, 353, 1, 0, 0, 0, 54, 361, 1, 0, 0, 0, 56, 363, 1, 0, 0, 0, 58, 407, 1, 0, 0, 0, 60, 409, 1, 0, 0, 0, 62, 412, 1, 0, 0, 0, 64, 421, 1, 0, 0, 0, 66, 429, 1, 0, 0, 0, 68, 438, 1, 0, 0, 0, 70, 447, 1, 0, 0, 0, 72, 456, 1, 0, 0, 0, 74, 460, 1, 0, 0, 0, 76, 466, 1, 0, 0, 0, 78, 470, 1, 0, 0, 0, 80, 473, 1, 0, 0, 0, 82, 481, 1, 0, 0, 0, 84, 485, 1, 0, 0, 0, 86, 489, 1, 0, 0, 0, 88, 492, 1, 0, 0, 0, 90, 497, 1, 0, 0, 0, 92, 501, 1, 0, 0, 0, 94, 503, 1, 0, 0, 0, 96, 505, 1, 0, 0, 0, 98, 508, 1, 0, 0, 0, 100, 512, 1, 0, 0, 0, 102, 515, 1, 0, 0, 0, 104, 518, 1, 0, 0, 0, 106, 538, 1, 0, 0, 0, 108, 109, 3, 2, 1, 0, 109, 110, 5, 0, 0, 1, 110, 1, 1, 0, 0, 0, 111, 112, 6, 1, -1, 0, 112, 113, 3, 4, 2, 0, 113, 119, 1, 0, 0, 0, 114, 115, 10, 1, 0, 0, 115, 116, 5, 26, 0, 0, 116, 118, 3, 6, 3, 0, 117, 114, 1, 0, 0, 0, 118, 121, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 3, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 122, 128, 3, 96, 48, 0, 123, 128, 3, 30, 15, 0, 124, 128, 3, 24, 12, 0, 125, 128, 3, 100, 50, 0, 126, 128, 3, 102, 51, 0, 127, 122, 1, 0, 0, 0, 127, 123, 1, 0, 0, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 5, 1, 0, 0, 0, 129, 143, 3, 44, 22, 0, 130, 143, 3, 48, 24, 0, 131, 143, 3, 60, 30, 0, 132, 143, 3, 66, 33, 0, 133, 143, 3, 62, 31, 0, 134, 143, 3, 46, 23, 0, 135, 143, 3, 8, 4, 0, 136, 143, 3, 68, 34, 0, 137, 143, 3, 70, 35, 0, 138, 143, 3, 74, 37, 0, 139, 143, 3, 76, 38, 0, 140, 143, 3, 104, 52, 0, 141, 143, 3, 78, 39, 0, 142, 129, 1, 0, 0, 0, 142, 130, 1, 0, 0, 0, 142, 131, 1, 0, 0, 0, 142, 132, 1, 0, 0, 0, 142, 133, 1, 0, 0, 0, 
142, 134, 1, 0, 0, 0, 142, 135, 1, 0, 0, 0, 142, 136, 1, 0, 0, 0, 142, 137, 1, 0, 0, 0, 142, 138, 1, 0, 0, 0, 142, 139, 1, 0, 0, 0, 142, 140, 1, 0, 0, 0, 142, 141, 1, 0, 0, 0, 143, 7, 1, 0, 0, 0, 144, 145, 5, 18, 0, 0, 145, 146, 3, 10, 5, 0, 146, 9, 1, 0, 0, 0, 147, 148, 6, 5, -1, 0, 148, 149, 5, 45, 0, 0, 149, 176, 3, 10, 5, 7, 150, 176, 3, 14, 7, 0, 151, 176, 3, 12, 6, 0, 152, 154, 3, 14, 7, 0, 153, 155, 5, 45, 0, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 157, 5, 42, 0, 0, 157, 158, 5, 41, 0, 0, 158, 163, 3, 14, 7, 0, 159, 160, 5, 35, 0, 0, 160, 162, 3, 14, 7, 0, 161, 159, 1, 0, 0, 0, 162, 165, 1, 0, 0, 0, 163, 161, 1, 0, 0, 0, 163, 164, 1, 0, 0, 0, 164, 166, 1, 0, 0, 0, 165, 163, 1, 0, 0, 0, 166, 167, 5, 51, 0, 0, 167, 176, 1, 0, 0, 0, 168, 169, 3, 14, 7, 0, 169, 171, 5, 43, 0, 0, 170, 172, 5, 45, 0, 0, 171, 170, 1, 0, 0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 1, 0, 0, 0, 173, 174, 5, 46, 0, 0, 174, 176, 1, 0, 0, 0, 175, 147, 1, 0, 0, 0, 175, 150, 1, 0, 0, 0, 175, 151, 1, 0, 0, 0, 175, 152, 1, 0, 0, 0, 175, 168, 1, 0, 0, 0, 176, 185, 1, 0, 0, 0, 177, 178, 10, 4, 0, 0, 178, 179, 5, 31, 0, 0, 179, 184, 3, 10, 5, 5, 180, 181, 10, 3, 0, 0, 181, 182, 5, 48, 0, 0, 182, 184, 3, 10, 5, 4, 183, 177, 1, 0, 0, 0, 183, 180, 1, 0, 0, 0, 184, 187, 1, 0, 0, 0, 185, 183, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 11, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 188, 190, 3, 14, 7, 0, 189, 191, 5, 45, 0, 0, 190, 189, 1, 0, 0, 0, 190, 191, 1, 0, 0, 0, 191, 192, 1, 0, 0, 0, 192, 193, 5, 44, 0, 0, 193, 194, 3, 92, 46, 0, 194, 203, 1, 0, 0, 0, 195, 197, 3, 14, 7, 0, 196, 198, 5, 45, 0, 0, 197, 196, 1, 0, 0, 0, 197, 198, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 200, 5, 50, 0, 0, 200, 201, 3, 92, 46, 0, 201, 203, 1, 0, 0, 0, 202, 188, 1, 0, 0, 0, 202, 195, 1, 0, 0, 0, 203, 13, 1, 0, 0, 0, 204, 210, 3, 16, 8, 0, 205, 206, 3, 16, 8, 0, 206, 207, 3, 94, 47, 0, 207, 208, 3, 16, 8, 0, 208, 210, 1, 0, 0, 0, 209, 204, 1, 0, 0, 0, 209, 205, 1, 0, 0, 0, 210, 15, 1, 0, 0, 0, 211, 
212, 6, 8, -1, 0, 212, 216, 3, 18, 9, 0, 213, 214, 7, 0, 0, 0, 214, 216, 3, 16, 8, 3, 215, 211, 1, 0, 0, 0, 215, 213, 1, 0, 0, 0, 216, 225, 1, 0, 0, 0, 217, 218, 10, 2, 0, 0, 218, 219, 7, 1, 0, 0, 219, 224, 3, 16, 8, 3, 220, 221, 10, 1, 0, 0, 221, 222, 7, 0, 0, 0, 222, 224, 3, 16, 8, 2, 223, 217, 1, 0, 0, 0, 223, 220, 1, 0, 0, 0, 224, 227, 1, 0, 0, 0, 225, 223, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 17, 1, 0, 0, 0, 227, 225, 1, 0, 0, 0, 228, 229, 6, 9, -1, 0, 229, 237, 3, 58, 29, 0, 230, 237, 3, 50, 25, 0, 231, 237, 3, 20, 10, 0, 232, 233, 5, 41, 0, 0, 233, 234, 3, 10, 5, 0, 234, 235, 5, 51, 0, 0, 235, 237, 1, 0, 0, 0, 236, 228, 1, 0, 0, 0, 236, 230, 1, 0, 0, 0, 236, 231, 1, 0, 0, 0, 236, 232, 1, 0, 0, 0, 237, 243, 1, 0, 0, 0, 238, 239, 10, 1, 0, 0, 239, 240, 5, 34, 0, 0, 240, 242, 3, 22, 11, 0, 241, 238, 1, 0, 0, 0, 242, 245, 1, 0, 0, 0, 243, 241, 1, 0, 0, 0, 243, 244, 1, 0, 0, 0, 244, 19, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 246, 247, 3, 54, 27, 0, 247, 257, 5, 41, 0, 0, 248, 258, 5, 62, 0, 0, 249, 254, 3, 10, 5, 0, 250, 251, 5, 35, 0, 0, 251, 253, 3, 10, 5, 0, 252, 250, 1, 0, 0, 0, 253, 256, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 254, 255, 1, 0, 0, 0, 255, 258, 1, 0, 0, 0, 256, 254, 1, 0, 0, 0, 257, 248, 1, 0, 0, 0, 257, 249, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 259, 1, 0, 0, 0, 259, 260, 5, 51, 0, 0, 260, 21, 1, 0, 0, 0, 261, 262, 3, 54, 27, 0, 262, 23, 1, 0, 0, 0, 263, 264, 5, 14, 0, 0, 264, 265, 3, 26, 13, 0, 265, 25, 1, 0, 0, 0, 266, 271, 3, 28, 14, 0, 267, 268, 5, 35, 0, 0, 268, 270, 3, 28, 14, 0, 269, 267, 1, 0, 0, 0, 270, 273, 1, 0, 0, 0, 271, 269, 1, 0, 0, 0, 271, 272, 1, 0, 0, 0, 272, 27, 1, 0, 0, 0, 273, 271, 1, 0, 0, 0, 274, 280, 3, 10, 5, 0, 275, 276, 3, 50, 25, 0, 276, 277, 5, 33, 0, 0, 277, 278, 3, 10, 5, 0, 278, 280, 1, 0, 0, 0, 279, 274, 1, 0, 0, 0, 279, 275, 1, 0, 0, 0, 280, 29, 1, 0, 0, 0, 281, 282, 5, 6, 0, 0, 282, 287, 3, 32, 16, 0, 283, 284, 5, 35, 0, 0, 284, 286, 3, 32, 16, 0, 285, 283, 1, 0, 0, 0, 286, 289, 1, 0, 0, 0, 287, 285, 1, 0, 0, 0, 287, 
288, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 290, 292, 3, 38, 19, 0, 291, 290, 1, 0, 0, 0, 291, 292, 1, 0, 0, 0, 292, 294, 1, 0, 0, 0, 293, 295, 3, 34, 17, 0, 294, 293, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 31, 1, 0, 0, 0, 296, 297, 5, 74, 0, 0, 297, 33, 1, 0, 0, 0, 298, 299, 5, 72, 0, 0, 299, 304, 3, 36, 18, 0, 300, 301, 5, 35, 0, 0, 301, 303, 3, 36, 18, 0, 302, 300, 1, 0, 0, 0, 303, 306, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 35, 1, 0, 0, 0, 306, 304, 1, 0, 0, 0, 307, 308, 3, 92, 46, 0, 308, 309, 5, 33, 0, 0, 309, 310, 3, 92, 46, 0, 310, 37, 1, 0, 0, 0, 311, 314, 3, 40, 20, 0, 312, 314, 3, 42, 21, 0, 313, 311, 1, 0, 0, 0, 313, 312, 1, 0, 0, 0, 314, 39, 1, 0, 0, 0, 315, 316, 5, 73, 0, 0, 316, 321, 3, 32, 16, 0, 317, 318, 5, 35, 0, 0, 318, 320, 3, 32, 16, 0, 319, 317, 1, 0, 0, 0, 320, 323, 1, 0, 0, 0, 321, 319, 1, 0, 0, 0, 321, 322, 1, 0, 0, 0, 322, 41, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 324, 325, 5, 65, 0, 0, 325, 326, 3, 40, 20, 0, 326, 327, 5, 66, 0, 0, 327, 43, 1, 0, 0, 0, 328, 329, 5, 4, 0, 0, 329, 330, 3, 26, 13, 0, 330, 45, 1, 0, 0, 0, 331, 333, 5, 17, 0, 0, 332, 334, 3, 26, 13, 0, 333, 332, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 337, 1, 0, 0, 0, 335, 336, 5, 30, 0, 0, 336, 338, 3, 26, 13, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 47, 1, 0, 0, 0, 339, 340, 5, 8, 0, 0, 340, 343, 3, 26, 13, 0, 341, 342, 5, 30, 0, 0, 342, 344, 3, 26, 13, 0, 343, 341, 1, 0, 0, 0, 343, 344, 1, 0, 0, 0, 344, 49, 1, 0, 0, 0, 345, 350, 3, 54, 27, 0, 346, 347, 5, 37, 0, 0, 347, 349, 3, 54, 27, 0, 348, 346, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 51, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 358, 3, 56, 28, 0, 354, 355, 5, 37, 0, 0, 355, 357, 3, 56, 28, 0, 356, 354, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 53, 1, 0, 0, 0, 360, 358, 1, 0, 0, 0, 361, 362, 7, 2, 0, 0, 362, 55, 1, 0, 0, 0, 363, 364, 5, 78, 0, 0, 364, 57, 1, 0, 0, 0, 365, 408, 5, 46, 0, 0, 366, 
367, 3, 90, 45, 0, 367, 368, 5, 67, 0, 0, 368, 408, 1, 0, 0, 0, 369, 408, 3, 88, 44, 0, 370, 408, 3, 90, 45, 0, 371, 408, 3, 84, 42, 0, 372, 408, 5, 49, 0, 0, 373, 408, 3, 92, 46, 0, 374, 375, 5, 65, 0, 0, 375, 380, 3, 86, 43, 0, 376, 377, 5, 35, 0, 0, 377, 379, 3, 86, 43, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 383, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 384, 5, 66, 0, 0, 384, 408, 1, 0, 0, 0, 385, 386, 5, 65, 0, 0, 386, 391, 3, 84, 42, 0, 387, 388, 5, 35, 0, 0, 388, 390, 3, 84, 42, 0, 389, 387, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 394, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 394, 395, 5, 66, 0, 0, 395, 408, 1, 0, 0, 0, 396, 397, 5, 65, 0, 0, 397, 402, 3, 92, 46, 0, 398, 399, 5, 35, 0, 0, 399, 401, 3, 92, 46, 0, 400, 398, 1, 0, 0, 0, 401, 404, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 402, 403, 1, 0, 0, 0, 403, 405, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 405, 406, 5, 66, 0, 0, 406, 408, 1, 0, 0, 0, 407, 365, 1, 0, 0, 0, 407, 366, 1, 0, 0, 0, 407, 369, 1, 0, 0, 0, 407, 370, 1, 0, 0, 0, 407, 371, 1, 0, 0, 0, 407, 372, 1, 0, 0, 0, 407, 373, 1, 0, 0, 0, 407, 374, 1, 0, 0, 0, 407, 385, 1, 0, 0, 0, 407, 396, 1, 0, 0, 0, 408, 59, 1, 0, 0, 0, 409, 410, 5, 10, 0, 0, 410, 411, 5, 28, 0, 0, 411, 61, 1, 0, 0, 0, 412, 413, 5, 16, 0, 0, 413, 418, 3, 64, 32, 0, 414, 415, 5, 35, 0, 0, 415, 417, 3, 64, 32, 0, 416, 414, 1, 0, 0, 0, 417, 420, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 63, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 421, 423, 3, 10, 5, 0, 422, 424, 7, 3, 0, 0, 423, 422, 1, 0, 0, 0, 423, 424, 1, 0, 0, 0, 424, 427, 1, 0, 0, 0, 425, 426, 5, 47, 0, 0, 426, 428, 7, 4, 0, 0, 427, 425, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 65, 1, 0, 0, 0, 429, 430, 5, 9, 0, 0, 430, 435, 3, 52, 26, 0, 431, 432, 5, 35, 0, 0, 432, 434, 3, 52, 26, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 67, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 5, 2, 0, 0, 
439, 444, 3, 52, 26, 0, 440, 441, 5, 35, 0, 0, 441, 443, 3, 52, 26, 0, 442, 440, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 69, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 447, 448, 5, 13, 0, 0, 448, 453, 3, 72, 36, 0, 449, 450, 5, 35, 0, 0, 450, 452, 3, 72, 36, 0, 451, 449, 1, 0, 0, 0, 452, 455, 1, 0, 0, 0, 453, 451, 1, 0, 0, 0, 453, 454, 1, 0, 0, 0, 454, 71, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 456, 457, 3, 52, 26, 0, 457, 458, 5, 82, 0, 0, 458, 459, 3, 52, 26, 0, 459, 73, 1, 0, 0, 0, 460, 461, 5, 1, 0, 0, 461, 462, 3, 18, 9, 0, 462, 464, 3, 92, 46, 0, 463, 465, 3, 80, 40, 0, 464, 463, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 75, 1, 0, 0, 0, 466, 467, 5, 7, 0, 0, 467, 468, 3, 18, 9, 0, 468, 469, 3, 92, 46, 0, 469, 77, 1, 0, 0, 0, 470, 471, 5, 12, 0, 0, 471, 472, 3, 50, 25, 0, 472, 79, 1, 0, 0, 0, 473, 478, 3, 82, 41, 0, 474, 475, 5, 35, 0, 0, 475, 477, 3, 82, 41, 0, 476, 474, 1, 0, 0, 0, 477, 480, 1, 0, 0, 0, 478, 476, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 81, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 482, 3, 54, 27, 0, 482, 483, 5, 33, 0, 0, 483, 484, 3, 58, 29, 0, 484, 83, 1, 0, 0, 0, 485, 486, 7, 5, 0, 0, 486, 85, 1, 0, 0, 0, 487, 490, 3, 88, 44, 0, 488, 490, 3, 90, 45, 0, 489, 487, 1, 0, 0, 0, 489, 488, 1, 0, 0, 0, 490, 87, 1, 0, 0, 0, 491, 493, 7, 0, 0, 0, 492, 491, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 495, 5, 29, 0, 0, 495, 89, 1, 0, 0, 0, 496, 498, 7, 0, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 499, 1, 0, 0, 0, 499, 500, 5, 28, 0, 0, 500, 91, 1, 0, 0, 0, 501, 502, 5, 27, 0, 0, 502, 93, 1, 0, 0, 0, 503, 504, 7, 6, 0, 0, 504, 95, 1, 0, 0, 0, 505, 506, 5, 5, 0, 0, 506, 507, 3, 98, 49, 0, 507, 97, 1, 0, 0, 0, 508, 509, 5, 65, 0, 0, 509, 510, 3, 2, 1, 0, 510, 511, 5, 66, 0, 0, 511, 99, 1, 0, 0, 0, 512, 513, 5, 15, 0, 0, 513, 514, 5, 98, 0, 0, 514, 101, 1, 0, 0, 0, 515, 516, 5, 11, 0, 0, 516, 517, 5, 102, 0, 0, 517, 103, 1, 0, 0, 0, 518, 519, 5, 3, 0, 0, 519, 522, 5, 88, 0, 0, 520, 521, 5, 86, 0, 0, 
521, 523, 3, 52, 26, 0, 522, 520, 1, 0, 0, 0, 522, 523, 1, 0, 0, 0, 523, 533, 1, 0, 0, 0, 524, 525, 5, 87, 0, 0, 525, 530, 3, 106, 53, 0, 526, 527, 5, 35, 0, 0, 527, 529, 3, 106, 53, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 534, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 524, 1, 0, 0, 0, 533, 534, 1, 0, 0, 0, 534, 105, 1, 0, 0, 0, 535, 536, 3, 52, 26, 0, 536, 537, 5, 33, 0, 0, 537, 539, 1, 0, 0, 0, 538, 535, 1, 0, 0, 0, 538, 539, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 541, 3, 52, 26, 0, 541, 107, 1, 0, 0, 0, 52, 119, 127, 142, 154, 163, 171, 175, 183, 185, 190, 197, 202, 209, 215, 223, 225, 236, 243, 254, 257, 271, 279, 287, 291, 294, 304, 313, 321, 333, 337, 343, 350, 358, 380, 391, 402, 407, 418, 423, 427, 435, 444, 453, 464, 478, 489, 492, 497, 522, 530, 533, 538] \ No newline at end of file +[4, 1, 117, 562, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 120, 8, 1, 10, 1, 12, 1, 123, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 131, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 146, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 158, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 165, 8, 5, 10, 5, 
12, 5, 168, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 175, 8, 5, 1, 5, 1, 5, 3, 5, 179, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 187, 8, 5, 10, 5, 12, 5, 190, 9, 5, 1, 6, 1, 6, 3, 6, 194, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 201, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 206, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 213, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 219, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 227, 8, 8, 10, 8, 12, 8, 230, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 240, 8, 9, 1, 9, 1, 9, 1, 9, 5, 9, 245, 8, 9, 10, 9, 12, 9, 248, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 256, 8, 10, 10, 10, 12, 10, 259, 9, 10, 3, 10, 261, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 5, 13, 273, 8, 13, 10, 13, 12, 13, 276, 9, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 3, 14, 283, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 289, 8, 15, 10, 15, 12, 15, 292, 9, 15, 1, 15, 3, 15, 295, 8, 15, 1, 15, 3, 15, 298, 8, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 306, 8, 17, 10, 17, 12, 17, 309, 9, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 3, 19, 317, 8, 19, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 323, 8, 20, 10, 20, 12, 20, 326, 9, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 336, 8, 22, 10, 22, 12, 22, 339, 9, 22, 1, 22, 3, 22, 342, 8, 22, 1, 22, 1, 22, 3, 22, 346, 8, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 3, 24, 353, 8, 24, 1, 24, 1, 24, 3, 24, 357, 8, 24, 1, 25, 1, 25, 1, 25, 1, 25, 3, 25, 363, 8, 25, 1, 26, 1, 26, 1, 26, 5, 26, 368, 8, 26, 10, 26, 12, 26, 371, 9, 26, 1, 27, 1, 27, 1, 27, 5, 27, 376, 8, 27, 10, 27, 12, 27, 379, 9, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 398, 8, 30, 10, 30, 12, 30, 401, 9, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 409, 8, 30, 10, 30, 12, 30, 412, 9, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 420, 8, 30, 10, 30, 12, 30, 423, 9, 30, 1, 30, 1, 30, 3, 
30, 427, 8, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 436, 8, 32, 10, 32, 12, 32, 439, 9, 32, 1, 33, 1, 33, 3, 33, 443, 8, 33, 1, 33, 1, 33, 3, 33, 447, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 453, 8, 34, 10, 34, 12, 34, 456, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 5, 35, 462, 8, 35, 10, 35, 12, 35, 465, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 5, 36, 471, 8, 36, 10, 36, 12, 36, 474, 9, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 484, 8, 38, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 5, 41, 496, 8, 41, 10, 41, 12, 41, 499, 9, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 3, 44, 509, 8, 44, 1, 45, 3, 45, 512, 8, 45, 1, 45, 1, 45, 1, 46, 3, 46, 517, 8, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 3, 53, 542, 8, 53, 1, 53, 1, 53, 1, 53, 1, 53, 5, 53, 548, 8, 53, 10, 53, 12, 53, 551, 9, 53, 3, 53, 553, 8, 53, 1, 54, 1, 54, 1, 54, 3, 54, 558, 8, 54, 1, 54, 1, 54, 1, 54, 0, 4, 2, 10, 16, 18, 55, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 0, 7, 1, 0, 62, 63, 1, 0, 64, 66, 1, 0, 69, 70, 2, 0, 34, 34, 38, 38, 1, 0, 41, 42, 2, 0, 40, 40, 54, 54, 2, 0, 55, 55, 57, 61, 590, 0, 110, 1, 0, 0, 0, 2, 113, 1, 0, 0, 0, 4, 130, 1, 0, 0, 0, 6, 145, 1, 0, 0, 0, 8, 147, 1, 0, 0, 0, 10, 178, 1, 0, 0, 0, 12, 205, 1, 0, 0, 0, 14, 212, 1, 0, 0, 0, 16, 218, 1, 0, 0, 0, 18, 239, 1, 0, 0, 0, 20, 249, 1, 0, 0, 0, 22, 264, 1, 0, 0, 0, 24, 266, 1, 0, 0, 0, 26, 269, 1, 0, 0, 0, 28, 282, 1, 0, 0, 0, 30, 284, 1, 0, 0, 0, 32, 299, 1, 0, 0, 0, 34, 301, 1, 0, 0, 0, 36, 310, 1, 0, 0, 0, 38, 316, 1, 0, 0, 0, 40, 318, 1, 0, 0, 0, 42, 327, 1, 0, 0, 0, 44, 331, 1, 0, 0, 0, 46, 347, 1, 0, 0, 0, 48, 350, 1, 0, 0, 0, 50, 358, 1, 0, 0, 0, 52, 364, 
1, 0, 0, 0, 54, 372, 1, 0, 0, 0, 56, 380, 1, 0, 0, 0, 58, 382, 1, 0, 0, 0, 60, 426, 1, 0, 0, 0, 62, 428, 1, 0, 0, 0, 64, 431, 1, 0, 0, 0, 66, 440, 1, 0, 0, 0, 68, 448, 1, 0, 0, 0, 70, 457, 1, 0, 0, 0, 72, 466, 1, 0, 0, 0, 74, 475, 1, 0, 0, 0, 76, 479, 1, 0, 0, 0, 78, 485, 1, 0, 0, 0, 80, 489, 1, 0, 0, 0, 82, 492, 1, 0, 0, 0, 84, 500, 1, 0, 0, 0, 86, 504, 1, 0, 0, 0, 88, 508, 1, 0, 0, 0, 90, 511, 1, 0, 0, 0, 92, 516, 1, 0, 0, 0, 94, 520, 1, 0, 0, 0, 96, 522, 1, 0, 0, 0, 98, 524, 1, 0, 0, 0, 100, 527, 1, 0, 0, 0, 102, 531, 1, 0, 0, 0, 104, 534, 1, 0, 0, 0, 106, 537, 1, 0, 0, 0, 108, 557, 1, 0, 0, 0, 110, 111, 3, 2, 1, 0, 111, 112, 5, 0, 0, 1, 112, 1, 1, 0, 0, 0, 113, 114, 6, 1, -1, 0, 114, 115, 3, 4, 2, 0, 115, 121, 1, 0, 0, 0, 116, 117, 10, 1, 0, 0, 117, 118, 5, 28, 0, 0, 118, 120, 3, 6, 3, 0, 119, 116, 1, 0, 0, 0, 120, 123, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 121, 122, 1, 0, 0, 0, 122, 3, 1, 0, 0, 0, 123, 121, 1, 0, 0, 0, 124, 131, 3, 98, 49, 0, 125, 131, 3, 30, 15, 0, 126, 131, 3, 24, 12, 0, 127, 131, 3, 44, 22, 0, 128, 131, 3, 102, 51, 0, 129, 131, 3, 104, 52, 0, 130, 124, 1, 0, 0, 0, 130, 125, 1, 0, 0, 0, 130, 126, 1, 0, 0, 0, 130, 127, 1, 0, 0, 0, 130, 128, 1, 0, 0, 0, 130, 129, 1, 0, 0, 0, 131, 5, 1, 0, 0, 0, 132, 146, 3, 46, 23, 0, 133, 146, 3, 50, 25, 0, 134, 146, 3, 62, 31, 0, 135, 146, 3, 68, 34, 0, 136, 146, 3, 64, 32, 0, 137, 146, 3, 48, 24, 0, 138, 146, 3, 8, 4, 0, 139, 146, 3, 70, 35, 0, 140, 146, 3, 72, 36, 0, 141, 146, 3, 76, 38, 0, 142, 146, 3, 78, 39, 0, 143, 146, 3, 106, 53, 0, 144, 146, 3, 80, 40, 0, 145, 132, 1, 0, 0, 0, 145, 133, 1, 0, 0, 0, 145, 134, 1, 0, 0, 0, 145, 135, 1, 0, 0, 0, 145, 136, 1, 0, 0, 0, 145, 137, 1, 0, 0, 0, 145, 138, 1, 0, 0, 0, 145, 139, 1, 0, 0, 0, 145, 140, 1, 0, 0, 0, 145, 141, 1, 0, 0, 0, 145, 142, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 145, 144, 1, 0, 0, 0, 146, 7, 1, 0, 0, 0, 147, 148, 5, 19, 0, 0, 148, 149, 3, 10, 5, 0, 149, 9, 1, 0, 0, 0, 150, 151, 6, 5, -1, 0, 151, 152, 5, 47, 0, 0, 152, 179, 3, 10, 5, 7, 153, 179, 3, 
14, 7, 0, 154, 179, 3, 12, 6, 0, 155, 157, 3, 14, 7, 0, 156, 158, 5, 47, 0, 0, 157, 156, 1, 0, 0, 0, 157, 158, 1, 0, 0, 0, 158, 159, 1, 0, 0, 0, 159, 160, 5, 44, 0, 0, 160, 161, 5, 43, 0, 0, 161, 166, 3, 14, 7, 0, 162, 163, 5, 37, 0, 0, 163, 165, 3, 14, 7, 0, 164, 162, 1, 0, 0, 0, 165, 168, 1, 0, 0, 0, 166, 164, 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 169, 1, 0, 0, 0, 168, 166, 1, 0, 0, 0, 169, 170, 5, 53, 0, 0, 170, 179, 1, 0, 0, 0, 171, 172, 3, 14, 7, 0, 172, 174, 5, 45, 0, 0, 173, 175, 5, 47, 0, 0, 174, 173, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 5, 48, 0, 0, 177, 179, 1, 0, 0, 0, 178, 150, 1, 0, 0, 0, 178, 153, 1, 0, 0, 0, 178, 154, 1, 0, 0, 0, 178, 155, 1, 0, 0, 0, 178, 171, 1, 0, 0, 0, 179, 188, 1, 0, 0, 0, 180, 181, 10, 4, 0, 0, 181, 182, 5, 33, 0, 0, 182, 187, 3, 10, 5, 5, 183, 184, 10, 3, 0, 0, 184, 185, 5, 50, 0, 0, 185, 187, 3, 10, 5, 4, 186, 180, 1, 0, 0, 0, 186, 183, 1, 0, 0, 0, 187, 190, 1, 0, 0, 0, 188, 186, 1, 0, 0, 0, 188, 189, 1, 0, 0, 0, 189, 11, 1, 0, 0, 0, 190, 188, 1, 0, 0, 0, 191, 193, 3, 14, 7, 0, 192, 194, 5, 47, 0, 0, 193, 192, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 195, 1, 0, 0, 0, 195, 196, 5, 46, 0, 0, 196, 197, 3, 94, 47, 0, 197, 206, 1, 0, 0, 0, 198, 200, 3, 14, 7, 0, 199, 201, 5, 47, 0, 0, 200, 199, 1, 0, 0, 0, 200, 201, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 5, 52, 0, 0, 203, 204, 3, 94, 47, 0, 204, 206, 1, 0, 0, 0, 205, 191, 1, 0, 0, 0, 205, 198, 1, 0, 0, 0, 206, 13, 1, 0, 0, 0, 207, 213, 3, 16, 8, 0, 208, 209, 3, 16, 8, 0, 209, 210, 3, 96, 48, 0, 210, 211, 3, 16, 8, 0, 211, 213, 1, 0, 0, 0, 212, 207, 1, 0, 0, 0, 212, 208, 1, 0, 0, 0, 213, 15, 1, 0, 0, 0, 214, 215, 6, 8, -1, 0, 215, 219, 3, 18, 9, 0, 216, 217, 7, 0, 0, 0, 217, 219, 3, 16, 8, 3, 218, 214, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 228, 1, 0, 0, 0, 220, 221, 10, 2, 0, 0, 221, 222, 7, 1, 0, 0, 222, 227, 3, 16, 8, 3, 223, 224, 10, 1, 0, 0, 224, 225, 7, 0, 0, 0, 225, 227, 3, 16, 8, 2, 226, 220, 1, 0, 0, 0, 226, 223, 1, 0, 0, 0, 227, 230, 1, 0, 
0, 0, 228, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 17, 1, 0, 0, 0, 230, 228, 1, 0, 0, 0, 231, 232, 6, 9, -1, 0, 232, 240, 3, 60, 30, 0, 233, 240, 3, 52, 26, 0, 234, 240, 3, 20, 10, 0, 235, 236, 5, 43, 0, 0, 236, 237, 3, 10, 5, 0, 237, 238, 5, 53, 0, 0, 238, 240, 1, 0, 0, 0, 239, 231, 1, 0, 0, 0, 239, 233, 1, 0, 0, 0, 239, 234, 1, 0, 0, 0, 239, 235, 1, 0, 0, 0, 240, 246, 1, 0, 0, 0, 241, 242, 10, 1, 0, 0, 242, 243, 5, 36, 0, 0, 243, 245, 3, 22, 11, 0, 244, 241, 1, 0, 0, 0, 245, 248, 1, 0, 0, 0, 246, 244, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 19, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 249, 250, 3, 56, 28, 0, 250, 260, 5, 43, 0, 0, 251, 261, 5, 64, 0, 0, 252, 257, 3, 10, 5, 0, 253, 254, 5, 37, 0, 0, 254, 256, 3, 10, 5, 0, 255, 253, 1, 0, 0, 0, 256, 259, 1, 0, 0, 0, 257, 255, 1, 0, 0, 0, 257, 258, 1, 0, 0, 0, 258, 261, 1, 0, 0, 0, 259, 257, 1, 0, 0, 0, 260, 251, 1, 0, 0, 0, 260, 252, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 263, 5, 53, 0, 0, 263, 21, 1, 0, 0, 0, 264, 265, 3, 56, 28, 0, 265, 23, 1, 0, 0, 0, 266, 267, 5, 15, 0, 0, 267, 268, 3, 26, 13, 0, 268, 25, 1, 0, 0, 0, 269, 274, 3, 28, 14, 0, 270, 271, 5, 37, 0, 0, 271, 273, 3, 28, 14, 0, 272, 270, 1, 0, 0, 0, 273, 276, 1, 0, 0, 0, 274, 272, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 27, 1, 0, 0, 0, 276, 274, 1, 0, 0, 0, 277, 283, 3, 10, 5, 0, 278, 279, 3, 52, 26, 0, 279, 280, 5, 35, 0, 0, 280, 281, 3, 10, 5, 0, 281, 283, 1, 0, 0, 0, 282, 277, 1, 0, 0, 0, 282, 278, 1, 0, 0, 0, 283, 29, 1, 0, 0, 0, 284, 285, 5, 6, 0, 0, 285, 290, 3, 32, 16, 0, 286, 287, 5, 37, 0, 0, 287, 289, 3, 32, 16, 0, 288, 286, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 288, 1, 0, 0, 0, 290, 291, 1, 0, 0, 0, 291, 294, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 293, 295, 3, 38, 19, 0, 294, 293, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 297, 1, 0, 0, 0, 296, 298, 3, 34, 17, 0, 297, 296, 1, 0, 0, 0, 297, 298, 1, 0, 0, 0, 298, 31, 1, 0, 0, 0, 299, 300, 5, 24, 0, 0, 300, 33, 1, 0, 0, 0, 301, 302, 5, 74, 0, 0, 302, 307, 3, 36, 18, 0, 303, 304, 5, 37, 
0, 0, 304, 306, 3, 36, 18, 0, 305, 303, 1, 0, 0, 0, 306, 309, 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 35, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 310, 311, 3, 94, 47, 0, 311, 312, 5, 35, 0, 0, 312, 313, 3, 94, 47, 0, 313, 37, 1, 0, 0, 0, 314, 317, 3, 40, 20, 0, 315, 317, 3, 42, 21, 0, 316, 314, 1, 0, 0, 0, 316, 315, 1, 0, 0, 0, 317, 39, 1, 0, 0, 0, 318, 319, 5, 75, 0, 0, 319, 324, 3, 32, 16, 0, 320, 321, 5, 37, 0, 0, 321, 323, 3, 32, 16, 0, 322, 320, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 41, 1, 0, 0, 0, 326, 324, 1, 0, 0, 0, 327, 328, 5, 67, 0, 0, 328, 329, 3, 40, 20, 0, 329, 330, 5, 68, 0, 0, 330, 43, 1, 0, 0, 0, 331, 332, 5, 12, 0, 0, 332, 337, 3, 32, 16, 0, 333, 334, 5, 37, 0, 0, 334, 336, 3, 32, 16, 0, 335, 333, 1, 0, 0, 0, 336, 339, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 341, 1, 0, 0, 0, 339, 337, 1, 0, 0, 0, 340, 342, 3, 26, 13, 0, 341, 340, 1, 0, 0, 0, 341, 342, 1, 0, 0, 0, 342, 345, 1, 0, 0, 0, 343, 344, 5, 32, 0, 0, 344, 346, 3, 26, 13, 0, 345, 343, 1, 0, 0, 0, 345, 346, 1, 0, 0, 0, 346, 45, 1, 0, 0, 0, 347, 348, 5, 4, 0, 0, 348, 349, 3, 26, 13, 0, 349, 47, 1, 0, 0, 0, 350, 352, 5, 18, 0, 0, 351, 353, 3, 26, 13, 0, 352, 351, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 356, 1, 0, 0, 0, 354, 355, 5, 32, 0, 0, 355, 357, 3, 26, 13, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 49, 1, 0, 0, 0, 358, 359, 5, 8, 0, 0, 359, 362, 3, 26, 13, 0, 360, 361, 5, 32, 0, 0, 361, 363, 3, 26, 13, 0, 362, 360, 1, 0, 0, 0, 362, 363, 1, 0, 0, 0, 363, 51, 1, 0, 0, 0, 364, 369, 3, 56, 28, 0, 365, 366, 5, 39, 0, 0, 366, 368, 3, 56, 28, 0, 367, 365, 1, 0, 0, 0, 368, 371, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 53, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 372, 377, 3, 58, 29, 0, 373, 374, 5, 39, 0, 0, 374, 376, 3, 58, 29, 0, 375, 373, 1, 0, 0, 0, 376, 379, 1, 0, 0, 0, 377, 375, 1, 0, 0, 0, 377, 378, 1, 0, 0, 0, 378, 55, 1, 0, 0, 0, 379, 377, 1, 0, 0, 0, 380, 381, 7, 2, 0, 0, 381, 57, 1, 
0, 0, 0, 382, 383, 5, 79, 0, 0, 383, 59, 1, 0, 0, 0, 384, 427, 5, 48, 0, 0, 385, 386, 3, 92, 46, 0, 386, 387, 5, 69, 0, 0, 387, 427, 1, 0, 0, 0, 388, 427, 3, 90, 45, 0, 389, 427, 3, 92, 46, 0, 390, 427, 3, 86, 43, 0, 391, 427, 5, 51, 0, 0, 392, 427, 3, 94, 47, 0, 393, 394, 5, 67, 0, 0, 394, 399, 3, 88, 44, 0, 395, 396, 5, 37, 0, 0, 396, 398, 3, 88, 44, 0, 397, 395, 1, 0, 0, 0, 398, 401, 1, 0, 0, 0, 399, 397, 1, 0, 0, 0, 399, 400, 1, 0, 0, 0, 400, 402, 1, 0, 0, 0, 401, 399, 1, 0, 0, 0, 402, 403, 5, 68, 0, 0, 403, 427, 1, 0, 0, 0, 404, 405, 5, 67, 0, 0, 405, 410, 3, 86, 43, 0, 406, 407, 5, 37, 0, 0, 407, 409, 3, 86, 43, 0, 408, 406, 1, 0, 0, 0, 409, 412, 1, 0, 0, 0, 410, 408, 1, 0, 0, 0, 410, 411, 1, 0, 0, 0, 411, 413, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 413, 414, 5, 68, 0, 0, 414, 427, 1, 0, 0, 0, 415, 416, 5, 67, 0, 0, 416, 421, 3, 94, 47, 0, 417, 418, 5, 37, 0, 0, 418, 420, 3, 94, 47, 0, 419, 417, 1, 0, 0, 0, 420, 423, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 421, 422, 1, 0, 0, 0, 422, 424, 1, 0, 0, 0, 423, 421, 1, 0, 0, 0, 424, 425, 5, 68, 0, 0, 425, 427, 1, 0, 0, 0, 426, 384, 1, 0, 0, 0, 426, 385, 1, 0, 0, 0, 426, 388, 1, 0, 0, 0, 426, 389, 1, 0, 0, 0, 426, 390, 1, 0, 0, 0, 426, 391, 1, 0, 0, 0, 426, 392, 1, 0, 0, 0, 426, 393, 1, 0, 0, 0, 426, 404, 1, 0, 0, 0, 426, 415, 1, 0, 0, 0, 427, 61, 1, 0, 0, 0, 428, 429, 5, 10, 0, 0, 429, 430, 5, 30, 0, 0, 430, 63, 1, 0, 0, 0, 431, 432, 5, 17, 0, 0, 432, 437, 3, 66, 33, 0, 433, 434, 5, 37, 0, 0, 434, 436, 3, 66, 33, 0, 435, 433, 1, 0, 0, 0, 436, 439, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 437, 438, 1, 0, 0, 0, 438, 65, 1, 0, 0, 0, 439, 437, 1, 0, 0, 0, 440, 442, 3, 10, 5, 0, 441, 443, 7, 3, 0, 0, 442, 441, 1, 0, 0, 0, 442, 443, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 445, 5, 49, 0, 0, 445, 447, 7, 4, 0, 0, 446, 444, 1, 0, 0, 0, 446, 447, 1, 0, 0, 0, 447, 67, 1, 0, 0, 0, 448, 449, 5, 9, 0, 0, 449, 454, 3, 54, 27, 0, 450, 451, 5, 37, 0, 0, 451, 453, 3, 54, 27, 0, 452, 450, 1, 0, 0, 0, 453, 456, 1, 0, 0, 0, 454, 452, 1, 0, 0, 0, 454, 455, 
1, 0, 0, 0, 455, 69, 1, 0, 0, 0, 456, 454, 1, 0, 0, 0, 457, 458, 5, 2, 0, 0, 458, 463, 3, 54, 27, 0, 459, 460, 5, 37, 0, 0, 460, 462, 3, 54, 27, 0, 461, 459, 1, 0, 0, 0, 462, 465, 1, 0, 0, 0, 463, 461, 1, 0, 0, 0, 463, 464, 1, 0, 0, 0, 464, 71, 1, 0, 0, 0, 465, 463, 1, 0, 0, 0, 466, 467, 5, 14, 0, 0, 467, 472, 3, 74, 37, 0, 468, 469, 5, 37, 0, 0, 469, 471, 3, 74, 37, 0, 470, 468, 1, 0, 0, 0, 471, 474, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 472, 473, 1, 0, 0, 0, 473, 73, 1, 0, 0, 0, 474, 472, 1, 0, 0, 0, 475, 476, 3, 54, 27, 0, 476, 477, 5, 83, 0, 0, 477, 478, 3, 54, 27, 0, 478, 75, 1, 0, 0, 0, 479, 480, 5, 1, 0, 0, 480, 481, 3, 18, 9, 0, 481, 483, 3, 94, 47, 0, 482, 484, 3, 82, 41, 0, 483, 482, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 77, 1, 0, 0, 0, 485, 486, 5, 7, 0, 0, 486, 487, 3, 18, 9, 0, 487, 488, 3, 94, 47, 0, 488, 79, 1, 0, 0, 0, 489, 490, 5, 13, 0, 0, 490, 491, 3, 52, 26, 0, 491, 81, 1, 0, 0, 0, 492, 497, 3, 84, 42, 0, 493, 494, 5, 37, 0, 0, 494, 496, 3, 84, 42, 0, 495, 493, 1, 0, 0, 0, 496, 499, 1, 0, 0, 0, 497, 495, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 83, 1, 0, 0, 0, 499, 497, 1, 0, 0, 0, 500, 501, 3, 56, 28, 0, 501, 502, 5, 35, 0, 0, 502, 503, 3, 60, 30, 0, 503, 85, 1, 0, 0, 0, 504, 505, 7, 5, 0, 0, 505, 87, 1, 0, 0, 0, 506, 509, 3, 90, 45, 0, 507, 509, 3, 92, 46, 0, 508, 506, 1, 0, 0, 0, 508, 507, 1, 0, 0, 0, 509, 89, 1, 0, 0, 0, 510, 512, 7, 0, 0, 0, 511, 510, 1, 0, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 5, 31, 0, 0, 514, 91, 1, 0, 0, 0, 515, 517, 7, 0, 0, 0, 516, 515, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 519, 5, 30, 0, 0, 519, 93, 1, 0, 0, 0, 520, 521, 5, 29, 0, 0, 521, 95, 1, 0, 0, 0, 522, 523, 7, 6, 0, 0, 523, 97, 1, 0, 0, 0, 524, 525, 5, 5, 0, 0, 525, 526, 3, 100, 50, 0, 526, 99, 1, 0, 0, 0, 527, 528, 5, 67, 0, 0, 528, 529, 3, 2, 1, 0, 529, 530, 5, 68, 0, 0, 530, 101, 1, 0, 0, 0, 531, 532, 5, 16, 0, 0, 532, 533, 5, 99, 0, 0, 533, 103, 1, 0, 0, 0, 534, 535, 5, 11, 0, 0, 535, 536, 5, 103, 0, 0, 536, 105, 1, 
0, 0, 0, 537, 538, 5, 3, 0, 0, 538, 541, 5, 89, 0, 0, 539, 540, 5, 87, 0, 0, 540, 542, 3, 54, 27, 0, 541, 539, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 552, 1, 0, 0, 0, 543, 544, 5, 88, 0, 0, 544, 549, 3, 108, 54, 0, 545, 546, 5, 37, 0, 0, 546, 548, 3, 108, 54, 0, 547, 545, 1, 0, 0, 0, 548, 551, 1, 0, 0, 0, 549, 547, 1, 0, 0, 0, 549, 550, 1, 0, 0, 0, 550, 553, 1, 0, 0, 0, 551, 549, 1, 0, 0, 0, 552, 543, 1, 0, 0, 0, 552, 553, 1, 0, 0, 0, 553, 107, 1, 0, 0, 0, 554, 555, 3, 54, 27, 0, 555, 556, 5, 35, 0, 0, 556, 558, 1, 0, 0, 0, 557, 554, 1, 0, 0, 0, 557, 558, 1, 0, 0, 0, 558, 559, 1, 0, 0, 0, 559, 560, 3, 54, 27, 0, 560, 109, 1, 0, 0, 0, 55, 121, 130, 145, 157, 166, 174, 178, 186, 188, 193, 200, 205, 212, 218, 226, 228, 239, 246, 257, 260, 274, 282, 290, 294, 297, 307, 316, 324, 337, 341, 345, 352, 356, 362, 369, 377, 399, 410, 421, 426, 437, 442, 446, 454, 463, 472, 483, 497, 508, 511, 516, 541, 549, 552, 557] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 2f7f0468e455..7cf25b86ded5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -18,58 +18,60 @@ public class EsqlBaseParser extends Parser { new PredictionContextCache(); public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, INLINESTATS=8, - KEEP=9, LIMIT=10, META=11, MV_EXPAND=12, RENAME=13, ROW=14, SHOW=15, SORT=16, - STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21, - WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25, - PIPE=26, QUOTED_STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, - AND=31, ASC=32, ASSIGN=33, CAST_OP=34, COMMA=35, DESC=36, DOT=37, FALSE=38, - FIRST=39, LAST=40, LP=41, IN=42, 
IS=43, LIKE=44, NOT=45, NULL=46, NULLS=47, - OR=48, PARAM=49, RLIKE=50, RP=51, TRUE=52, EQ=53, CIEQ=54, NEQ=55, LT=56, - LTE=57, GT=58, GTE=59, PLUS=60, MINUS=61, ASTERISK=62, SLASH=63, PERCENT=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, OPTIONS=72, - METADATA=73, FROM_UNQUOTED_IDENTIFIER=74, FROM_LINE_COMMENT=75, FROM_MULTILINE_COMMENT=76, - FROM_WS=77, ID_PATTERN=78, PROJECT_LINE_COMMENT=79, PROJECT_MULTILINE_COMMENT=80, - PROJECT_WS=81, AS=82, RENAME_LINE_COMMENT=83, RENAME_MULTILINE_COMMENT=84, - RENAME_WS=85, ON=86, WITH=87, ENRICH_POLICY_NAME=88, ENRICH_LINE_COMMENT=89, - ENRICH_MULTILINE_COMMENT=90, ENRICH_WS=91, ENRICH_FIELD_LINE_COMMENT=92, - ENRICH_FIELD_MULTILINE_COMMENT=93, ENRICH_FIELD_WS=94, MVEXPAND_LINE_COMMENT=95, - MVEXPAND_MULTILINE_COMMENT=96, MVEXPAND_WS=97, INFO=98, SHOW_LINE_COMMENT=99, - SHOW_MULTILINE_COMMENT=100, SHOW_WS=101, FUNCTIONS=102, META_LINE_COMMENT=103, - META_MULTILINE_COMMENT=104, META_WS=105, COLON=106, SETTING=107, SETTING_LINE_COMMENT=108, - SETTTING_MULTILINE_COMMENT=109, SETTING_WS=110; + KEEP=9, LIMIT=10, META=11, METRICS=12, MV_EXPAND=13, RENAME=14, ROW=15, + SHOW=16, SORT=17, STATS=18, WHERE=19, UNKNOWN_CMD=20, LINE_COMMENT=21, + MULTILINE_COMMENT=22, WS=23, INDEX_UNQUOTED_IDENTIFIER=24, EXPLAIN_WS=25, + EXPLAIN_LINE_COMMENT=26, EXPLAIN_MULTILINE_COMMENT=27, PIPE=28, QUOTED_STRING=29, + INTEGER_LITERAL=30, DECIMAL_LITERAL=31, BY=32, AND=33, ASC=34, ASSIGN=35, + CAST_OP=36, COMMA=37, DESC=38, DOT=39, FALSE=40, FIRST=41, LAST=42, LP=43, + IN=44, IS=45, LIKE=46, NOT=47, NULL=48, NULLS=49, OR=50, PARAM=51, RLIKE=52, + RP=53, TRUE=54, EQ=55, CIEQ=56, NEQ=57, LT=58, LTE=59, GT=60, GTE=61, + PLUS=62, MINUS=63, ASTERISK=64, SLASH=65, PERCENT=66, OPENING_BRACKET=67, + CLOSING_BRACKET=68, UNQUOTED_IDENTIFIER=69, QUOTED_IDENTIFIER=70, EXPR_LINE_COMMENT=71, + EXPR_MULTILINE_COMMENT=72, EXPR_WS=73, OPTIONS=74, METADATA=75, 
FROM_LINE_COMMENT=76, + FROM_MULTILINE_COMMENT=77, FROM_WS=78, ID_PATTERN=79, PROJECT_LINE_COMMENT=80, + PROJECT_MULTILINE_COMMENT=81, PROJECT_WS=82, AS=83, RENAME_LINE_COMMENT=84, + RENAME_MULTILINE_COMMENT=85, RENAME_WS=86, ON=87, WITH=88, ENRICH_POLICY_NAME=89, + ENRICH_LINE_COMMENT=90, ENRICH_MULTILINE_COMMENT=91, ENRICH_WS=92, ENRICH_FIELD_LINE_COMMENT=93, + ENRICH_FIELD_MULTILINE_COMMENT=94, ENRICH_FIELD_WS=95, MVEXPAND_LINE_COMMENT=96, + MVEXPAND_MULTILINE_COMMENT=97, MVEXPAND_WS=98, INFO=99, SHOW_LINE_COMMENT=100, + SHOW_MULTILINE_COMMENT=101, SHOW_WS=102, FUNCTIONS=103, META_LINE_COMMENT=104, + META_MULTILINE_COMMENT=105, META_WS=106, COLON=107, SETTING=108, SETTING_LINE_COMMENT=109, + SETTTING_MULTILINE_COMMENT=110, SETTING_WS=111, METRICS_LINE_COMMENT=112, + METRICS_MULTILINE_COMMENT=113, METRICS_WS=114, CLOSING_METRICS_LINE_COMMENT=115, + CLOSING_METRICS_MULTILINE_COMMENT=116, CLOSING_METRICS_WS=117; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, RULE_valueExpression = 7, RULE_operatorExpression = 8, RULE_primaryExpression = 9, RULE_functionExpression = 10, RULE_dataType = 11, RULE_rowCommand = 12, - RULE_fields = 13, RULE_field = 14, RULE_fromCommand = 15, RULE_fromIdentifier = 16, + RULE_fields = 13, RULE_field = 14, RULE_fromCommand = 15, RULE_indexIdentifier = 16, RULE_fromOptions = 17, RULE_configOption = 18, RULE_metadata = 19, RULE_metadataOption = 20, - RULE_deprecated_metadata = 21, RULE_evalCommand = 22, RULE_statsCommand = 23, - RULE_inlinestatsCommand = 24, RULE_qualifiedName = 25, RULE_qualifiedNamePattern = 26, - RULE_identifier = 27, RULE_identifierPattern = 28, RULE_constant = 29, - RULE_limitCommand = 30, RULE_sortCommand = 31, RULE_orderExpression = 32, - RULE_keepCommand = 33, RULE_dropCommand = 34, RULE_renameCommand = 35, - RULE_renameClause = 36, RULE_dissectCommand = 37, 
RULE_grokCommand = 38, - RULE_mvExpandCommand = 39, RULE_commandOptions = 40, RULE_commandOption = 41, - RULE_booleanValue = 42, RULE_numericValue = 43, RULE_decimalValue = 44, - RULE_integerValue = 45, RULE_string = 46, RULE_comparisonOperator = 47, - RULE_explainCommand = 48, RULE_subqueryExpression = 49, RULE_showCommand = 50, - RULE_metaCommand = 51, RULE_enrichCommand = 52, RULE_enrichWithClause = 53; + RULE_deprecated_metadata = 21, RULE_metricsCommand = 22, RULE_evalCommand = 23, + RULE_statsCommand = 24, RULE_inlinestatsCommand = 25, RULE_qualifiedName = 26, + RULE_qualifiedNamePattern = 27, RULE_identifier = 28, RULE_identifierPattern = 29, + RULE_constant = 30, RULE_limitCommand = 31, RULE_sortCommand = 32, RULE_orderExpression = 33, + RULE_keepCommand = 34, RULE_dropCommand = 35, RULE_renameCommand = 36, + RULE_renameClause = 37, RULE_dissectCommand = 38, RULE_grokCommand = 39, + RULE_mvExpandCommand = 40, RULE_commandOptions = 41, RULE_commandOption = 42, + RULE_booleanValue = 43, RULE_numericValue = 44, RULE_decimalValue = 45, + RULE_integerValue = 46, RULE_string = 47, RULE_comparisonOperator = 48, + RULE_explainCommand = 49, RULE_subqueryExpression = 50, RULE_showCommand = 51, + RULE_metaCommand = 52, RULE_enrichCommand = 53, RULE_enrichWithClause = 54; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", "booleanExpression", "regexBooleanExpression", "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", "dataType", "rowCommand", - "fields", "field", "fromCommand", "fromIdentifier", "fromOptions", "configOption", - "metadata", "metadataOption", "deprecated_metadata", "evalCommand", "statsCommand", - "inlinestatsCommand", "qualifiedName", "qualifiedNamePattern", "identifier", - "identifierPattern", "constant", "limitCommand", "sortCommand", "orderExpression", - "keepCommand", "dropCommand", "renameCommand", "renameClause", 
"dissectCommand", - "grokCommand", "mvExpandCommand", "commandOptions", "commandOption", - "booleanValue", "numericValue", "decimalValue", "integerValue", "string", - "comparisonOperator", "explainCommand", "subqueryExpression", "showCommand", - "metaCommand", "enrichCommand", "enrichWithClause" + "fields", "field", "fromCommand", "indexIdentifier", "fromOptions", "configOption", + "metadata", "metadataOption", "deprecated_metadata", "metricsCommand", + "evalCommand", "statsCommand", "inlinestatsCommand", "qualifiedName", + "qualifiedNamePattern", "identifier", "identifierPattern", "constant", + "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", + "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", + "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", + "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", + "showCommand", "metaCommand", "enrichCommand", "enrichWithClause" }; } public static final String[] ruleNames = makeRuleNames(); @@ -77,15 +79,15 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'inlinestats'", "'keep'", "'limit'", "'meta'", "'mv_expand'", - "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, - null, null, null, null, null, null, "'|'", null, null, null, "'by'", - "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'", - "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", - "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", - "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", - null, null, null, null, null, "'options'", "'metadata'", null, null, - null, null, null, null, null, null, "'as'", null, null, null, "'on'", + "'grok'", "'inlinestats'", "'keep'", "'limit'", 
"'meta'", "'metrics'", + "'mv_expand'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", + null, null, null, null, null, null, null, null, "'|'", null, null, null, + "'by'", "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", + "'first'", "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", + "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", + "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", + null, "']'", null, null, null, null, null, "'options'", "'metadata'", + null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, null, null, null, null, null, "'info'", null, null, null, "'functions'", null, null, null, "':'" }; @@ -94,25 +96,28 @@ private static String[] makeLiteralNames() { private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "INLINESTATS", "KEEP", "LIMIT", "META", "MV_EXPAND", "RENAME", "ROW", - "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", - "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "PIPE", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", - "AND", "ASC", "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", - "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", - "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", - "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "OPTIONS", "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", - "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", - "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", - "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", 
- "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "INLINESTATS", "KEEP", "LIMIT", "META", "METRICS", "MV_EXPAND", "RENAME", + "ROW", "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", + "MULTILINE_COMMENT", "WS", "INDEX_UNQUOTED_IDENTIFIER", "EXPLAIN_WS", + "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", + "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", + "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", + "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", + "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", + "OPTIONS", "METADATA", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", "SHOW_WS", "FUNCTIONS", "META_LINE_COMMENT", "META_MULTILINE_COMMENT", "META_WS", "COLON", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS" + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", + "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -199,9 +204,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); 
{ - setState(108); + setState(110); query(0); - setState(109); + setState(111); match(EOF); } } @@ -297,11 +302,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(112); + setState(114); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(119); + setState(121); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -312,16 +317,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(114); + setState(116); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(115); + setState(117); match(PIPE); - setState(116); + setState(118); processingCommand(); } } } - setState(121); + setState(123); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -349,6 +354,9 @@ public FromCommandContext fromCommand() { public RowCommandContext rowCommand() { return getRuleContext(RowCommandContext.class,0); } + public MetricsCommandContext metricsCommand() { + return getRuleContext(MetricsCommandContext.class,0); + } public ShowCommandContext showCommand() { return getRuleContext(ShowCommandContext.class,0); } @@ -379,41 +387,48 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(127); + setState(130); _errHandler.sync(this); switch (_input.LA(1)) { case EXPLAIN: enterOuterAlt(_localctx, 1); { - setState(122); + setState(124); explainCommand(); } break; case FROM: enterOuterAlt(_localctx, 2); { - setState(123); + setState(125); fromCommand(); } break; case ROW: enterOuterAlt(_localctx, 3); { - 
setState(124); + setState(126); rowCommand(); } break; - case SHOW: + case METRICS: enterOuterAlt(_localctx, 4); { - setState(125); + setState(127); + metricsCommand(); + } + break; + case SHOW: + enterOuterAlt(_localctx, 5); + { + setState(128); showCommand(); } break; case META: - enterOuterAlt(_localctx, 5); + enterOuterAlt(_localctx, 6); { - setState(126); + setState(129); metaCommand(); } break; @@ -497,97 +512,97 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(142); + setState(145); _errHandler.sync(this); switch (_input.LA(1)) { case EVAL: enterOuterAlt(_localctx, 1); { - setState(129); + setState(132); evalCommand(); } break; case INLINESTATS: enterOuterAlt(_localctx, 2); { - setState(130); + setState(133); inlinestatsCommand(); } break; case LIMIT: enterOuterAlt(_localctx, 3); { - setState(131); + setState(134); limitCommand(); } break; case KEEP: enterOuterAlt(_localctx, 4); { - setState(132); + setState(135); keepCommand(); } break; case SORT: enterOuterAlt(_localctx, 5); { - setState(133); + setState(136); sortCommand(); } break; case STATS: enterOuterAlt(_localctx, 6); { - setState(134); + setState(137); statsCommand(); } break; case WHERE: enterOuterAlt(_localctx, 7); { - setState(135); + setState(138); whereCommand(); } break; case DROP: enterOuterAlt(_localctx, 8); { - setState(136); + setState(139); dropCommand(); } break; case RENAME: enterOuterAlt(_localctx, 9); { - setState(137); + setState(140); renameCommand(); } break; case DISSECT: enterOuterAlt(_localctx, 10); { - setState(138); + setState(141); dissectCommand(); } break; case GROK: enterOuterAlt(_localctx, 11); { - setState(139); + setState(142); grokCommand(); } break; case ENRICH: enterOuterAlt(_localctx, 12); { - setState(140); + setState(143); enrichCommand(); } break; case MV_EXPAND: 
enterOuterAlt(_localctx, 13); { - setState(141); + setState(144); mvExpandCommand(); } break; @@ -638,9 +653,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(144); + setState(147); match(WHERE); - setState(145); + setState(148); booleanExpression(0); } } @@ -835,7 +850,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(175); + setState(178); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -844,9 +859,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(148); + setState(151); match(NOT); - setState(149); + setState(152); booleanExpression(7); } break; @@ -855,7 +870,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(150); + setState(153); valueExpression(); } break; @@ -864,7 +879,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(151); + setState(154); regexBooleanExpression(); } break; @@ -873,41 +888,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(152); + setState(155); valueExpression(); - setState(154); + setState(157); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(153); + setState(156); match(NOT); } } - setState(156); + setState(159); match(IN); - setState(157); + setState(160); match(LP); - setState(158); + setState(161); valueExpression(); - setState(163); + setState(166); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) 
{ { { - setState(159); + setState(162); match(COMMA); - setState(160); + setState(163); valueExpression(); } } - setState(165); + setState(168); _errHandler.sync(this); _la = _input.LA(1); } - setState(166); + setState(169); match(RP); } break; @@ -916,27 +931,27 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(168); + setState(171); valueExpression(); - setState(169); + setState(172); match(IS); - setState(171); + setState(174); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(170); + setState(173); match(NOT); } } - setState(173); + setState(176); match(NULL); } break; } _ctx.stop = _input.LT(-1); - setState(185); + setState(188); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -944,7 +959,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(183); + setState(186); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -952,11 +967,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(177); + setState(180); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(178); + setState(181); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(179); + setState(182); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; @@ -965,18 +980,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc 
_localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(180); + setState(183); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(181); + setState(184); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(182); + setState(185); ((LogicalBinaryContext)_localctx).right = booleanExpression(4); } break; } } } - setState(187); + setState(190); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1031,48 +1046,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(202); + setState(205); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(188); + setState(191); valueExpression(); - setState(190); + setState(193); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(189); + setState(192); match(NOT); } } - setState(192); + setState(195); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(193); + setState(196); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(195); + setState(198); valueExpression(); - setState(197); + setState(200); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(196); + setState(199); match(NOT); } } - setState(199); + setState(202); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(200); + setState(203); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1158,14 +1173,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new 
ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 14, RULE_valueExpression); try { - setState(209); + setState(212); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(204); + setState(207); operatorExpression(0); } break; @@ -1173,11 +1188,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(205); + setState(208); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(206); + setState(209); comparisonOperator(); - setState(207); + setState(210); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1302,7 +1317,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(215); + setState(218); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1311,7 +1326,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(212); + setState(215); primaryExpression(0); } break; @@ -1320,7 +1335,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(213); + setState(216); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1331,13 +1346,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(214); + setState(217); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(225); + setState(228); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1345,7 +1360,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(223); + setState(226); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1353,12 +1368,12 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(217); + setState(220); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(218); + setState(221); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((((_la - 62)) & ~0x3f) == 0 && ((1L << (_la - 62)) & 7L) != 0)) ) { + if ( !(((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & 7L) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1366,7 +1381,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(219); + setState(222); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1375,9 +1390,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(220); + setState(223); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(221); + 
setState(224); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1388,14 +1403,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(222); + setState(225); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(227); + setState(230); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1553,7 +1568,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(236); + setState(239); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: @@ -1562,7 +1577,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(229); + setState(232); constant(); } break; @@ -1571,7 +1586,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(230); + setState(233); qualifiedName(); } break; @@ -1580,7 +1595,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(231); + setState(234); functionExpression(); } break; @@ -1589,17 +1604,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(232); + setState(235); match(LP); - setState(233); + setState(236); booleanExpression(0); - setState(234); + setState(237); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(243); + setState(246); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( 
_alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1610,16 +1625,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(238); + setState(241); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(239); + setState(242); match(CAST_OP); - setState(240); + setState(243); dataType(); } } } - setState(245); + setState(248); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -1681,16 +1696,16 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(246); + setState(249); identifier(); - setState(247); + setState(250); match(LP); - setState(257); + setState(260); _errHandler.sync(this); switch (_input.LA(1)) { case ASTERISK: { - setState(248); + setState(251); match(ASTERISK); } break; @@ -1710,21 +1725,21 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case QUOTED_IDENTIFIER: { { - setState(249); + setState(252); booleanExpression(0); - setState(254); + setState(257); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(250); + setState(253); match(COMMA); - setState(251); + setState(254); booleanExpression(0); } } - setState(256); + setState(259); _errHandler.sync(this); _la = _input.LA(1); } @@ -1736,7 +1751,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx default: break; } - setState(259); + setState(262); match(RP); } } @@ -1794,7 +1809,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(261); + setState(264); identifier(); } } @@ -1841,9 +1856,9 @@ public final RowCommandContext 
rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(263); + setState(266); match(ROW); - setState(264); + setState(267); fields(); } } @@ -1897,23 +1912,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(266); + setState(269); field(); - setState(271); + setState(274); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(267); + setState(270); match(COMMA); - setState(268); + setState(271); field(); } } } - setState(273); + setState(276); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } @@ -1963,24 +1978,24 @@ public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); enterRule(_localctx, 28, RULE_field); try { - setState(279); + setState(282); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(274); + setState(277); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(275); + setState(278); qualifiedName(); - setState(276); + setState(279); match(ASSIGN); - setState(277); + setState(280); booleanExpression(0); } break; @@ -2000,11 +2015,11 @@ public final FieldContext field() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class FromCommandContext extends ParserRuleContext { public TerminalNode FROM() { return getToken(EsqlBaseParser.FROM, 0); } - public List fromIdentifier() { - return getRuleContexts(FromIdentifierContext.class); + public List indexIdentifier() { + return getRuleContexts(IndexIdentifierContext.class); } - public FromIdentifierContext fromIdentifier(int i) { - return getRuleContext(FromIdentifierContext.class,i); + public IndexIdentifierContext 
indexIdentifier(int i) { + return getRuleContext(IndexIdentifierContext.class,i); } public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } public TerminalNode COMMA(int i) { @@ -2043,44 +2058,44 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(281); + setState(284); match(FROM); - setState(282); - fromIdentifier(); - setState(287); + setState(285); + indexIdentifier(); + setState(290); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(283); + setState(286); match(COMMA); - setState(284); - fromIdentifier(); + setState(287); + indexIdentifier(); } } } - setState(289); + setState(292); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } - setState(291); + setState(294); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(290); + setState(293); metadata(); } break; } - setState(294); + setState(297); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(293); + setState(296); fromOptions(); } break; @@ -2099,36 +2114,36 @@ public final FromCommandContext fromCommand() throws RecognitionException { } @SuppressWarnings("CheckReturnValue") - public static class FromIdentifierContext extends ParserRuleContext { - public TerminalNode FROM_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.FROM_UNQUOTED_IDENTIFIER, 0); } + public static class IndexIdentifierContext extends ParserRuleContext { + public TerminalNode INDEX_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.INDEX_UNQUOTED_IDENTIFIER, 0); } @SuppressWarnings("this-escape") - public FromIdentifierContext(ParserRuleContext parent, int invokingState) { + public IndexIdentifierContext(ParserRuleContext parent, int invokingState) { super(parent, 
invokingState); } - @Override public int getRuleIndex() { return RULE_fromIdentifier; } + @Override public int getRuleIndex() { return RULE_indexIdentifier; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterFromIdentifier(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterIndexIdentifier(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitFromIdentifier(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitIndexIdentifier(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitFromIdentifier(this); + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitIndexIdentifier(this); else return visitor.visitChildren(this); } } - public final FromIdentifierContext fromIdentifier() throws RecognitionException { - FromIdentifierContext _localctx = new FromIdentifierContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_fromIdentifier); + public final IndexIdentifierContext indexIdentifier() throws RecognitionException { + IndexIdentifierContext _localctx = new IndexIdentifierContext(_ctx, getState()); + enterRule(_localctx, 32, RULE_indexIdentifier); try { enterOuterAlt(_localctx, 1); { - setState(296); - match(FROM_UNQUOTED_IDENTIFIER); + setState(299); + match(INDEX_UNQUOTED_IDENTIFIER); } } catch (RecognitionException re) { @@ -2182,25 +2197,25 @@ public final FromOptionsContext fromOptions() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(298); + setState(301); match(OPTIONS); - setState(299); + setState(302); configOption(); - setState(304); + setState(307); _errHandler.sync(this); _alt = 
getInterpreter().adaptivePredict(_input,25,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(300); + setState(303); match(COMMA); - setState(301); + setState(304); configOption(); } } } - setState(306); + setState(309); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,25,_ctx); } @@ -2252,11 +2267,11 @@ public final ConfigOptionContext configOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(307); + setState(310); string(); - setState(308); + setState(311); match(ASSIGN); - setState(309); + setState(312); string(); } } @@ -2303,20 +2318,20 @@ public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 38, RULE_metadata); try { - setState(313); + setState(316); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(311); + setState(314); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(312); + setState(315); deprecated_metadata(); } break; @@ -2338,11 +2353,11 @@ public final MetadataContext metadata() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class MetadataOptionContext extends ParserRuleContext { public TerminalNode METADATA() { return getToken(EsqlBaseParser.METADATA, 0); } - public List fromIdentifier() { - return getRuleContexts(FromIdentifierContext.class); + public List indexIdentifier() { + return getRuleContexts(IndexIdentifierContext.class); } - public FromIdentifierContext fromIdentifier(int i) { - return getRuleContext(FromIdentifierContext.class,i); + public IndexIdentifierContext indexIdentifier(int i) { + return getRuleContext(IndexIdentifierContext.class,i); } public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } public TerminalNode COMMA(int i) { @@ -2375,25 +2390,25 @@ public final 
MetadataOptionContext metadataOption() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(315); + setState(318); match(METADATA); - setState(316); - fromIdentifier(); - setState(321); + setState(319); + indexIdentifier(); + setState(324); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(317); + setState(320); match(COMMA); - setState(318); - fromIdentifier(); + setState(321); + indexIdentifier(); } } } - setState(323); + setState(326); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } @@ -2442,11 +2457,11 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition try { enterOuterAlt(_localctx, 1); { - setState(324); + setState(327); match(OPENING_BRACKET); - setState(325); + setState(328); metadataOption(); - setState(326); + setState(329); match(CLOSING_BRACKET); } } @@ -2461,6 +2476,112 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class MetricsCommandContext extends ParserRuleContext { + public FieldsContext aggregates; + public FieldsContext grouping; + public TerminalNode METRICS() { return getToken(EsqlBaseParser.METRICS, 0); } + public List indexIdentifier() { + return getRuleContexts(IndexIdentifierContext.class); + } + public IndexIdentifierContext indexIdentifier(int i) { + return getRuleContext(IndexIdentifierContext.class,i); + } + public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(EsqlBaseParser.COMMA, i); + } + public TerminalNode BY() { return getToken(EsqlBaseParser.BY, 0); } + public List fields() { + return getRuleContexts(FieldsContext.class); + } + public FieldsContext fields(int i) { + return getRuleContext(FieldsContext.class,i); + } + 
@SuppressWarnings("this-escape") + public MetricsCommandContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_metricsCommand; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMetricsCommand(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMetricsCommand(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitMetricsCommand(this); + else return visitor.visitChildren(this); + } + } + + public final MetricsCommandContext metricsCommand() throws RecognitionException { + MetricsCommandContext _localctx = new MetricsCommandContext(_ctx, getState()); + enterRule(_localctx, 44, RULE_metricsCommand); + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(331); + match(METRICS); + setState(332); + indexIdentifier(); + setState(337); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(333); + match(COMMA); + setState(334); + indexIdentifier(); + } + } + } + setState(339); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); + } + setState(341); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + case 1: + { + setState(340); + ((MetricsCommandContext)_localctx).aggregates = fields(); + } + break; + } + setState(345); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + case 1: + { + setState(343); + match(BY); + setState(344); + ((MetricsCommandContext)_localctx).grouping = fields(); + } + 
break; + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") public static class EvalCommandContext extends ParserRuleContext { public TerminalNode EVAL() { return getToken(EsqlBaseParser.EVAL, 0); } @@ -2489,13 +2610,13 @@ public T accept(ParseTreeVisitor visitor) { public final EvalCommandContext evalCommand() throws RecognitionException { EvalCommandContext _localctx = new EvalCommandContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_evalCommand); + enterRule(_localctx, 46, RULE_evalCommand); try { enterOuterAlt(_localctx, 1); { - setState(328); + setState(347); match(EVAL); - setState(329); + setState(348); fields(); } } @@ -2544,30 +2665,30 @@ public T accept(ParseTreeVisitor visitor) { public final StatsCommandContext statsCommand() throws RecognitionException { StatsCommandContext _localctx = new StatsCommandContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_statsCommand); + enterRule(_localctx, 48, RULE_statsCommand); try { enterOuterAlt(_localctx, 1); { - setState(331); + setState(350); match(STATS); - setState(333); + setState(352); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(332); + setState(351); ((StatsCommandContext)_localctx).stats = fields(); } break; } - setState(337); + setState(356); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(335); + setState(354); match(BY); - setState(336); + setState(355); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2619,22 +2740,22 @@ public T accept(ParseTreeVisitor visitor) { public final InlinestatsCommandContext inlinestatsCommand() throws 
RecognitionException { InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_inlinestatsCommand); + enterRule(_localctx, 50, RULE_inlinestatsCommand); try { enterOuterAlt(_localctx, 1); { - setState(339); + setState(358); match(INLINESTATS); - setState(340); + setState(359); ((InlinestatsCommandContext)_localctx).stats = fields(); - setState(343); + setState(362); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(341); + setState(360); match(BY); - setState(342); + setState(361); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -2686,30 +2807,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_qualifiedName); + enterRule(_localctx, 52, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(345); + setState(364); identifier(); - setState(350); + setState(369); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,31,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(346); + setState(365); match(DOT); - setState(347); + setState(366); identifier(); } } } - setState(352); + setState(371); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,31,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } } } @@ -2758,30 +2879,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternContext qualifiedNamePattern() throws RecognitionException { QualifiedNamePatternContext _localctx = new QualifiedNamePatternContext(_ctx, getState()); - enterRule(_localctx, 52, 
RULE_qualifiedNamePattern); + enterRule(_localctx, 54, RULE_qualifiedNamePattern); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(353); + setState(372); identifierPattern(); - setState(358); + setState(377); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,32,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(354); + setState(373); match(DOT); - setState(355); + setState(374); identifierPattern(); } } } - setState(360); + setState(379); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,32,_ctx); + _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } } } @@ -2822,12 +2943,12 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_identifier); + enterRule(_localctx, 56, RULE_identifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(361); + setState(380); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2875,11 +2996,11 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierPatternContext identifierPattern() throws RecognitionException { IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_identifierPattern); + enterRule(_localctx, 58, RULE_identifierPattern); try { enterOuterAlt(_localctx, 1); { - setState(363); + setState(382); match(ID_PATTERN); } } @@ -3145,17 +3266,17 @@ public T accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_constant); + enterRule(_localctx, 60, RULE_constant); int _la; try { - 
setState(407); + setState(426); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(365); + setState(384); match(NULL); } break; @@ -3163,9 +3284,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(366); + setState(385); integerValue(); - setState(367); + setState(386); match(UNQUOTED_IDENTIFIER); } break; @@ -3173,7 +3294,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(369); + setState(388); decimalValue(); } break; @@ -3181,7 +3302,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(370); + setState(389); integerValue(); } break; @@ -3189,7 +3310,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(371); + setState(390); booleanValue(); } break; @@ -3197,7 +3318,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(372); + setState(391); match(PARAM); } break; @@ -3205,7 +3326,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(373); + setState(392); string(); } break; @@ -3213,27 +3334,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(374); + setState(393); 
match(OPENING_BRACKET); - setState(375); + setState(394); numericValue(); - setState(380); + setState(399); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(376); + setState(395); match(COMMA); - setState(377); + setState(396); numericValue(); } } - setState(382); + setState(401); _errHandler.sync(this); _la = _input.LA(1); } - setState(383); + setState(402); match(CLOSING_BRACKET); } break; @@ -3241,27 +3362,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(385); + setState(404); match(OPENING_BRACKET); - setState(386); + setState(405); booleanValue(); - setState(391); + setState(410); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(387); + setState(406); match(COMMA); - setState(388); + setState(407); booleanValue(); } } - setState(393); + setState(412); _errHandler.sync(this); _la = _input.LA(1); } - setState(394); + setState(413); match(CLOSING_BRACKET); } break; @@ -3269,27 +3390,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(396); + setState(415); match(OPENING_BRACKET); - setState(397); + setState(416); string(); - setState(402); + setState(421); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(398); + setState(417); match(COMMA); - setState(399); + setState(418); string(); } } - setState(404); + setState(423); _errHandler.sync(this); _la = _input.LA(1); } - setState(405); + setState(424); match(CLOSING_BRACKET); } break; @@ -3332,13 +3453,13 @@ public T accept(ParseTreeVisitor visitor) { public final LimitCommandContext limitCommand() throws RecognitionException { LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_limitCommand); + enterRule(_localctx, 62, 
RULE_limitCommand); try { enterOuterAlt(_localctx, 1); { - setState(409); + setState(428); match(LIMIT); - setState(410); + setState(429); match(INTEGER_LITERAL); } } @@ -3388,32 +3509,32 @@ public T accept(ParseTreeVisitor visitor) { public final SortCommandContext sortCommand() throws RecognitionException { SortCommandContext _localctx = new SortCommandContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_sortCommand); + enterRule(_localctx, 64, RULE_sortCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(412); + setState(431); match(SORT); - setState(413); + setState(432); orderExpression(); - setState(418); + setState(437); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(414); + setState(433); match(COMMA); - setState(415); + setState(434); orderExpression(); } } } - setState(420); + setState(439); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); } } } @@ -3462,19 +3583,19 @@ public T accept(ParseTreeVisitor visitor) { public final OrderExpressionContext orderExpression() throws RecognitionException { OrderExpressionContext _localctx = new OrderExpressionContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_orderExpression); + enterRule(_localctx, 66, RULE_orderExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(421); + setState(440); booleanExpression(0); - setState(423); + setState(442); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,38,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: { - setState(422); + setState(441); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3488,14 +3609,14 @@ public 
final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(427); + setState(446); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: { - setState(425); + setState(444); match(NULLS); - setState(426); + setState(445); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3557,32 +3678,32 @@ public T accept(ParseTreeVisitor visitor) { public final KeepCommandContext keepCommand() throws RecognitionException { KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_keepCommand); + enterRule(_localctx, 68, RULE_keepCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(429); + setState(448); match(KEEP); - setState(430); + setState(449); qualifiedNamePattern(); - setState(435); + setState(454); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,43,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(431); + setState(450); match(COMMA); - setState(432); + setState(451); qualifiedNamePattern(); } } } - setState(437); + setState(456); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,43,_ctx); } } } @@ -3632,32 +3753,32 @@ public T accept(ParseTreeVisitor visitor) { public final DropCommandContext dropCommand() throws RecognitionException { DropCommandContext _localctx = new DropCommandContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_dropCommand); + enterRule(_localctx, 70, RULE_dropCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(438); + setState(457); match(DROP); - setState(439); + setState(458); qualifiedNamePattern(); - setState(444); + 
setState(463); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,41,_ctx); + _alt = getInterpreter().adaptivePredict(_input,44,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(440); + setState(459); match(COMMA); - setState(441); + setState(460); qualifiedNamePattern(); } } } - setState(446); + setState(465); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,41,_ctx); + _alt = getInterpreter().adaptivePredict(_input,44,_ctx); } } } @@ -3707,32 +3828,32 @@ public T accept(ParseTreeVisitor visitor) { public final RenameCommandContext renameCommand() throws RecognitionException { RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_renameCommand); + enterRule(_localctx, 72, RULE_renameCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(447); + setState(466); match(RENAME); - setState(448); + setState(467); renameClause(); - setState(453); + setState(472); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,42,_ctx); + _alt = getInterpreter().adaptivePredict(_input,45,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(449); + setState(468); match(COMMA); - setState(450); + setState(469); renameClause(); } } } - setState(455); + setState(474); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,42,_ctx); + _alt = getInterpreter().adaptivePredict(_input,45,_ctx); } } } @@ -3780,15 +3901,15 @@ public T accept(ParseTreeVisitor visitor) { public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_renameClause); + enterRule(_localctx, 74, RULE_renameClause); try { enterOuterAlt(_localctx, 1); { - setState(456); + setState(475); 
((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(457); + setState(476); match(AS); - setState(458); + setState(477); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -3837,22 +3958,22 @@ public T accept(ParseTreeVisitor visitor) { public final DissectCommandContext dissectCommand() throws RecognitionException { DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_dissectCommand); + enterRule(_localctx, 76, RULE_dissectCommand); try { enterOuterAlt(_localctx, 1); { - setState(460); + setState(479); match(DISSECT); - setState(461); + setState(480); primaryExpression(0); - setState(462); + setState(481); string(); - setState(464); + setState(483); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(463); + setState(482); commandOptions(); } break; @@ -3901,15 +4022,15 @@ public T accept(ParseTreeVisitor visitor) { public final GrokCommandContext grokCommand() throws RecognitionException { GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_grokCommand); + enterRule(_localctx, 78, RULE_grokCommand); try { enterOuterAlt(_localctx, 1); { - setState(466); + setState(485); match(GROK); - setState(467); + setState(486); primaryExpression(0); - setState(468); + setState(487); string(); } } @@ -3952,13 +4073,13 @@ public T accept(ParseTreeVisitor visitor) { public final MvExpandCommandContext mvExpandCommand() throws RecognitionException { MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_mvExpandCommand); + enterRule(_localctx, 80, RULE_mvExpandCommand); try { enterOuterAlt(_localctx, 1); { - setState(470); + setState(489); match(MV_EXPAND); - setState(471); + setState(490); qualifiedName(); } } @@ -4007,30 +4128,30 @@ public T 
accept(ParseTreeVisitor visitor) { public final CommandOptionsContext commandOptions() throws RecognitionException { CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_commandOptions); + enterRule(_localctx, 82, RULE_commandOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(473); + setState(492); commandOption(); - setState(478); + setState(497); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,44,_ctx); + _alt = getInterpreter().adaptivePredict(_input,47,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(474); + setState(493); match(COMMA); - setState(475); + setState(494); commandOption(); } } } - setState(480); + setState(499); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,44,_ctx); + _alt = getInterpreter().adaptivePredict(_input,47,_ctx); } } } @@ -4076,15 +4197,15 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionContext commandOption() throws RecognitionException { CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_commandOption); + enterRule(_localctx, 84, RULE_commandOption); try { enterOuterAlt(_localctx, 1); { - setState(481); + setState(500); identifier(); - setState(482); + setState(501); match(ASSIGN); - setState(483); + setState(502); constant(); } } @@ -4125,12 +4246,12 @@ public T accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_booleanValue); + enterRule(_localctx, 86, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(485); + setState(504); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4183,22 +4304,22 @@ public T accept(ParseTreeVisitor 
visitor) { public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_numericValue); + enterRule(_localctx, 88, RULE_numericValue); try { - setState(489); + setState(508); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(487); + setState(506); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(488); + setState(507); integerValue(); } break; @@ -4242,17 +4363,17 @@ public T accept(ParseTreeVisitor visitor) { public final DecimalValueContext decimalValue() throws RecognitionException { DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_decimalValue); + enterRule(_localctx, 90, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(492); + setState(511); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(491); + setState(510); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4265,7 +4386,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(494); + setState(513); match(DECIMAL_LITERAL); } } @@ -4307,17 +4428,17 @@ public T accept(ParseTreeVisitor visitor) { public final IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_integerValue); + enterRule(_localctx, 92, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(497); + setState(516); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(496); + setState(515); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { 
_errHandler.recoverInline(this); @@ -4330,7 +4451,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(499); + setState(518); match(INTEGER_LITERAL); } } @@ -4370,11 +4491,11 @@ public T accept(ParseTreeVisitor visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_string); + enterRule(_localctx, 94, RULE_string); try { enterOuterAlt(_localctx, 1); { - setState(501); + setState(520); match(QUOTED_STRING); } } @@ -4419,14 +4540,14 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_comparisonOperator); + enterRule(_localctx, 96, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(503); + setState(522); _la = _input.LA(1); - if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 1125899906842624000L) != 0)) ) { + if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 4503599627370496000L) != 0)) ) { _errHandler.recoverInline(this); } else { @@ -4475,13 +4596,13 @@ public T accept(ParseTreeVisitor visitor) { public final ExplainCommandContext explainCommand() throws RecognitionException { ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_explainCommand); + enterRule(_localctx, 98, RULE_explainCommand); try { enterOuterAlt(_localctx, 1); { - setState(505); + setState(524); match(EXPLAIN); - setState(506); + setState(525); subqueryExpression(); } } @@ -4525,15 +4646,15 @@ public T accept(ParseTreeVisitor visitor) { public final SubqueryExpressionContext subqueryExpression() throws RecognitionException { SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState()); - enterRule(_localctx, 98, 
RULE_subqueryExpression); + enterRule(_localctx, 100, RULE_subqueryExpression); try { enterOuterAlt(_localctx, 1); { - setState(508); + setState(527); match(OPENING_BRACKET); - setState(509); + setState(528); query(0); - setState(510); + setState(529); match(CLOSING_BRACKET); } } @@ -4585,14 +4706,14 @@ public T accept(ParseTreeVisitor visitor) { public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_showCommand); + enterRule(_localctx, 102, RULE_showCommand); try { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(512); + setState(531); match(SHOW); - setState(513); + setState(532); match(INFO); } } @@ -4644,14 +4765,14 @@ public T accept(ParseTreeVisitor visitor) { public final MetaCommandContext metaCommand() throws RecognitionException { MetaCommandContext _localctx = new MetaCommandContext(_ctx, getState()); - enterRule(_localctx, 102, RULE_metaCommand); + enterRule(_localctx, 104, RULE_metaCommand); try { _localctx = new MetaFunctionsContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(515); + setState(534); match(META); - setState(516); + setState(535); match(FUNCTIONS); } } @@ -4709,53 +4830,53 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 104, RULE_enrichCommand); + enterRule(_localctx, 106, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(518); + setState(537); match(ENRICH); - setState(519); + setState(538); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(522); + setState(541); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { case 1: { - 
setState(520); + setState(539); match(ON); - setState(521); + setState(540); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(533); + setState(552); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: { - setState(524); + setState(543); match(WITH); - setState(525); + setState(544); enrichWithClause(); - setState(530); + setState(549); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,49,_ctx); + _alt = getInterpreter().adaptivePredict(_input,52,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(526); + setState(545); match(COMMA); - setState(527); + setState(546); enrichWithClause(); } } } - setState(532); + setState(551); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,49,_ctx); + _alt = getInterpreter().adaptivePredict(_input,52,_ctx); } } break; @@ -4806,23 +4927,23 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 106, RULE_enrichWithClause); + enterRule(_localctx, 108, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(538); + setState(557); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,54,_ctx) ) { case 1: { - setState(535); + setState(554); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(536); + setState(555); match(ASSIGN); } break; } - setState(540); + setState(559); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -4884,7 +5005,7 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } public static 
final String _serializedATN = - "\u0004\u0001n\u021f\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001u\u0232\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -4898,337 +5019,350 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ - "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u0001\u0000\u0001\u0000"+ - "\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0005\u0001v\b\u0001\n\u0001\f\u0001y\t\u0001\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0080\b\u0002"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0003\u0003\u008f\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0003\u0005\u009b\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0005\u0005\u00a2\b\u0005\n\u0005\f\u0005\u00a5"+ - "\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003"+ - "\u0005\u00ac\b\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00b0\b\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0005\u0005\u00b8\b\u0005\n\u0005\f\u0005\u00bb\t\u0005\u0001\u0006\u0001"+ - "\u0006\u0003\u0006\u00bf\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - 
"\u0006\u0001\u0006\u0003\u0006\u00c6\b\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0003\u0006\u00cb\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0003\u0007\u00d2\b\u0007\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0003\b\u00d8\b\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0005"+ - "\b\u00e0\b\b\n\b\f\b\u00e3\t\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t"+ - "\u0001\t\u0001\t\u0001\t\u0003\t\u00ed\b\t\u0001\t\u0001\t\u0001\t\u0005"+ - "\t\u00f2\b\t\n\t\f\t\u00f5\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n"+ - "\u0001\n\u0005\n\u00fd\b\n\n\n\f\n\u0100\t\n\u0003\n\u0102\b\n\u0001\n"+ - "\u0001\n\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\r\u0001"+ - "\r\u0001\r\u0005\r\u010e\b\r\n\r\f\r\u0111\t\r\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0003\u000e\u0118\b\u000e\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u011e\b\u000f\n\u000f"+ - "\f\u000f\u0121\t\u000f\u0001\u000f\u0003\u000f\u0124\b\u000f\u0001\u000f"+ - "\u0003\u000f\u0127\b\u000f\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0005\u0011\u012f\b\u0011\n\u0011\f\u0011\u0132"+ - "\t\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ - "\u0013\u0003\u0013\u013a\b\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0005\u0014\u0140\b\u0014\n\u0014\f\u0014\u0143\t\u0014\u0001\u0015"+ - "\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016"+ - "\u0001\u0017\u0001\u0017\u0003\u0017\u014e\b\u0017\u0001\u0017\u0001\u0017"+ - "\u0003\u0017\u0152\b\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018"+ - "\u0003\u0018\u0158\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019"+ - "\u015d\b\u0019\n\u0019\f\u0019\u0160\t\u0019\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0005\u001a\u0165\b\u001a\n\u001a\f\u001a\u0168\t\u001a\u0001\u001b"+ - "\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001d"+ - 
"\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ - "\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u017b\b\u001d"+ - "\n\u001d\f\u001d\u017e\t\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001"+ - "\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u0186\b\u001d\n\u001d\f\u001d"+ - "\u0189\t\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d"+ - "\u0001\u001d\u0005\u001d\u0191\b\u001d\n\u001d\f\u001d\u0194\t\u001d\u0001"+ - "\u001d\u0001\u001d\u0003\u001d\u0198\b\u001d\u0001\u001e\u0001\u001e\u0001"+ - "\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01a1"+ - "\b\u001f\n\u001f\f\u001f\u01a4\t\u001f\u0001 \u0001 \u0003 \u01a8\b \u0001"+ - " \u0001 \u0003 \u01ac\b \u0001!\u0001!\u0001!\u0001!\u0005!\u01b2\b!\n"+ - "!\f!\u01b5\t!\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bb\b\"\n\"\f"+ - "\"\u01be\t\"\u0001#\u0001#\u0001#\u0001#\u0005#\u01c4\b#\n#\f#\u01c7\t"+ - "#\u0001$\u0001$\u0001$\u0001$\u0001%\u0001%\u0001%\u0001%\u0003%\u01d1"+ - "\b%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001(\u0001("+ - "\u0001(\u0005(\u01dd\b(\n(\f(\u01e0\t(\u0001)\u0001)\u0001)\u0001)\u0001"+ - "*\u0001*\u0001+\u0001+\u0003+\u01ea\b+\u0001,\u0003,\u01ed\b,\u0001,\u0001"+ - ",\u0001-\u0003-\u01f2\b-\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u0001"+ - "0\u00010\u00010\u00011\u00011\u00011\u00011\u00012\u00012\u00012\u0001"+ - "3\u00013\u00013\u00014\u00014\u00014\u00014\u00034\u020b\b4\u00014\u0001"+ - "4\u00014\u00014\u00054\u0211\b4\n4\f4\u0214\t4\u00034\u0216\b4\u00015"+ - "\u00015\u00015\u00035\u021b\b5\u00015\u00015\u00015\u0000\u0004\u0002"+ - "\n\u0010\u00126\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014"+ - "\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfh"+ - "j\u0000\u0007\u0001\u0000<=\u0001\u0000>@\u0001\u0000CD\u0002\u0000 "+ - "$$\u0001\u0000\'(\u0002\u0000&&44\u0002\u0000557;\u0238\u0000l\u0001\u0000"+ - 
"\u0000\u0000\u0002o\u0001\u0000\u0000\u0000\u0004\u007f\u0001\u0000\u0000"+ - "\u0000\u0006\u008e\u0001\u0000\u0000\u0000\b\u0090\u0001\u0000\u0000\u0000"+ - "\n\u00af\u0001\u0000\u0000\u0000\f\u00ca\u0001\u0000\u0000\u0000\u000e"+ - "\u00d1\u0001\u0000\u0000\u0000\u0010\u00d7\u0001\u0000\u0000\u0000\u0012"+ - "\u00ec\u0001\u0000\u0000\u0000\u0014\u00f6\u0001\u0000\u0000\u0000\u0016"+ - "\u0105\u0001\u0000\u0000\u0000\u0018\u0107\u0001\u0000\u0000\u0000\u001a"+ - "\u010a\u0001\u0000\u0000\u0000\u001c\u0117\u0001\u0000\u0000\u0000\u001e"+ - "\u0119\u0001\u0000\u0000\u0000 \u0128\u0001\u0000\u0000\u0000\"\u012a"+ - "\u0001\u0000\u0000\u0000$\u0133\u0001\u0000\u0000\u0000&\u0139\u0001\u0000"+ - "\u0000\u0000(\u013b\u0001\u0000\u0000\u0000*\u0144\u0001\u0000\u0000\u0000"+ - ",\u0148\u0001\u0000\u0000\u0000.\u014b\u0001\u0000\u0000\u00000\u0153"+ - "\u0001\u0000\u0000\u00002\u0159\u0001\u0000\u0000\u00004\u0161\u0001\u0000"+ - "\u0000\u00006\u0169\u0001\u0000\u0000\u00008\u016b\u0001\u0000\u0000\u0000"+ - ":\u0197\u0001\u0000\u0000\u0000<\u0199\u0001\u0000\u0000\u0000>\u019c"+ - "\u0001\u0000\u0000\u0000@\u01a5\u0001\u0000\u0000\u0000B\u01ad\u0001\u0000"+ - "\u0000\u0000D\u01b6\u0001\u0000\u0000\u0000F\u01bf\u0001\u0000\u0000\u0000"+ - "H\u01c8\u0001\u0000\u0000\u0000J\u01cc\u0001\u0000\u0000\u0000L\u01d2"+ - "\u0001\u0000\u0000\u0000N\u01d6\u0001\u0000\u0000\u0000P\u01d9\u0001\u0000"+ - "\u0000\u0000R\u01e1\u0001\u0000\u0000\u0000T\u01e5\u0001\u0000\u0000\u0000"+ - "V\u01e9\u0001\u0000\u0000\u0000X\u01ec\u0001\u0000\u0000\u0000Z\u01f1"+ - "\u0001\u0000\u0000\u0000\\\u01f5\u0001\u0000\u0000\u0000^\u01f7\u0001"+ - "\u0000\u0000\u0000`\u01f9\u0001\u0000\u0000\u0000b\u01fc\u0001\u0000\u0000"+ - "\u0000d\u0200\u0001\u0000\u0000\u0000f\u0203\u0001\u0000\u0000\u0000h"+ - "\u0206\u0001\u0000\u0000\u0000j\u021a\u0001\u0000\u0000\u0000lm\u0003"+ - "\u0002\u0001\u0000mn\u0005\u0000\u0000\u0001n\u0001\u0001\u0000\u0000"+ - 
"\u0000op\u0006\u0001\uffff\uffff\u0000pq\u0003\u0004\u0002\u0000qw\u0001"+ - "\u0000\u0000\u0000rs\n\u0001\u0000\u0000st\u0005\u001a\u0000\u0000tv\u0003"+ - "\u0006\u0003\u0000ur\u0001\u0000\u0000\u0000vy\u0001\u0000\u0000\u0000"+ - "wu\u0001\u0000\u0000\u0000wx\u0001\u0000\u0000\u0000x\u0003\u0001\u0000"+ - "\u0000\u0000yw\u0001\u0000\u0000\u0000z\u0080\u0003`0\u0000{\u0080\u0003"+ - "\u001e\u000f\u0000|\u0080\u0003\u0018\f\u0000}\u0080\u0003d2\u0000~\u0080"+ - "\u0003f3\u0000\u007fz\u0001\u0000\u0000\u0000\u007f{\u0001\u0000\u0000"+ - "\u0000\u007f|\u0001\u0000\u0000\u0000\u007f}\u0001\u0000\u0000\u0000\u007f"+ - "~\u0001\u0000\u0000\u0000\u0080\u0005\u0001\u0000\u0000\u0000\u0081\u008f"+ - "\u0003,\u0016\u0000\u0082\u008f\u00030\u0018\u0000\u0083\u008f\u0003<"+ - "\u001e\u0000\u0084\u008f\u0003B!\u0000\u0085\u008f\u0003>\u001f\u0000"+ - "\u0086\u008f\u0003.\u0017\u0000\u0087\u008f\u0003\b\u0004\u0000\u0088"+ - "\u008f\u0003D\"\u0000\u0089\u008f\u0003F#\u0000\u008a\u008f\u0003J%\u0000"+ - "\u008b\u008f\u0003L&\u0000\u008c\u008f\u0003h4\u0000\u008d\u008f\u0003"+ - "N\'\u0000\u008e\u0081\u0001\u0000\u0000\u0000\u008e\u0082\u0001\u0000"+ - "\u0000\u0000\u008e\u0083\u0001\u0000\u0000\u0000\u008e\u0084\u0001\u0000"+ - "\u0000\u0000\u008e\u0085\u0001\u0000\u0000\u0000\u008e\u0086\u0001\u0000"+ - "\u0000\u0000\u008e\u0087\u0001\u0000\u0000\u0000\u008e\u0088\u0001\u0000"+ - "\u0000\u0000\u008e\u0089\u0001\u0000\u0000\u0000\u008e\u008a\u0001\u0000"+ - "\u0000\u0000\u008e\u008b\u0001\u0000\u0000\u0000\u008e\u008c\u0001\u0000"+ - "\u0000\u0000\u008e\u008d\u0001\u0000\u0000\u0000\u008f\u0007\u0001\u0000"+ - "\u0000\u0000\u0090\u0091\u0005\u0012\u0000\u0000\u0091\u0092\u0003\n\u0005"+ - "\u0000\u0092\t\u0001\u0000\u0000\u0000\u0093\u0094\u0006\u0005\uffff\uffff"+ - "\u0000\u0094\u0095\u0005-\u0000\u0000\u0095\u00b0\u0003\n\u0005\u0007"+ - "\u0096\u00b0\u0003\u000e\u0007\u0000\u0097\u00b0\u0003\f\u0006\u0000\u0098"+ - 
"\u009a\u0003\u000e\u0007\u0000\u0099\u009b\u0005-\u0000\u0000\u009a\u0099"+ - "\u0001\u0000\u0000\u0000\u009a\u009b\u0001\u0000\u0000\u0000\u009b\u009c"+ - "\u0001\u0000\u0000\u0000\u009c\u009d\u0005*\u0000\u0000\u009d\u009e\u0005"+ - ")\u0000\u0000\u009e\u00a3\u0003\u000e\u0007\u0000\u009f\u00a0\u0005#\u0000"+ - "\u0000\u00a0\u00a2\u0003\u000e\u0007\u0000\u00a1\u009f\u0001\u0000\u0000"+ - "\u0000\u00a2\u00a5\u0001\u0000\u0000\u0000\u00a3\u00a1\u0001\u0000\u0000"+ - "\u0000\u00a3\u00a4\u0001\u0000\u0000\u0000\u00a4\u00a6\u0001\u0000\u0000"+ - "\u0000\u00a5\u00a3\u0001\u0000\u0000\u0000\u00a6\u00a7\u00053\u0000\u0000"+ - "\u00a7\u00b0\u0001\u0000\u0000\u0000\u00a8\u00a9\u0003\u000e\u0007\u0000"+ - "\u00a9\u00ab\u0005+\u0000\u0000\u00aa\u00ac\u0005-\u0000\u0000\u00ab\u00aa"+ - "\u0001\u0000\u0000\u0000\u00ab\u00ac\u0001\u0000\u0000\u0000\u00ac\u00ad"+ - "\u0001\u0000\u0000\u0000\u00ad\u00ae\u0005.\u0000\u0000\u00ae\u00b0\u0001"+ - "\u0000\u0000\u0000\u00af\u0093\u0001\u0000\u0000\u0000\u00af\u0096\u0001"+ - "\u0000\u0000\u0000\u00af\u0097\u0001\u0000\u0000\u0000\u00af\u0098\u0001"+ - "\u0000\u0000\u0000\u00af\u00a8\u0001\u0000\u0000\u0000\u00b0\u00b9\u0001"+ - "\u0000\u0000\u0000\u00b1\u00b2\n\u0004\u0000\u0000\u00b2\u00b3\u0005\u001f"+ - "\u0000\u0000\u00b3\u00b8\u0003\n\u0005\u0005\u00b4\u00b5\n\u0003\u0000"+ - "\u0000\u00b5\u00b6\u00050\u0000\u0000\u00b6\u00b8\u0003\n\u0005\u0004"+ - "\u00b7\u00b1\u0001\u0000\u0000\u0000\u00b7\u00b4\u0001\u0000\u0000\u0000"+ - "\u00b8\u00bb\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000"+ - "\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u000b\u0001\u0000\u0000\u0000"+ - "\u00bb\u00b9\u0001\u0000\u0000\u0000\u00bc\u00be\u0003\u000e\u0007\u0000"+ - "\u00bd\u00bf\u0005-\u0000\u0000\u00be\u00bd\u0001\u0000\u0000\u0000\u00be"+ - "\u00bf\u0001\u0000\u0000\u0000\u00bf\u00c0\u0001\u0000\u0000\u0000\u00c0"+ - "\u00c1\u0005,\u0000\u0000\u00c1\u00c2\u0003\\.\u0000\u00c2\u00cb\u0001"+ - 
"\u0000\u0000\u0000\u00c3\u00c5\u0003\u000e\u0007\u0000\u00c4\u00c6\u0005"+ - "-\u0000\u0000\u00c5\u00c4\u0001\u0000\u0000\u0000\u00c5\u00c6\u0001\u0000"+ - "\u0000\u0000\u00c6\u00c7\u0001\u0000\u0000\u0000\u00c7\u00c8\u00052\u0000"+ - "\u0000\u00c8\u00c9\u0003\\.\u0000\u00c9\u00cb\u0001\u0000\u0000\u0000"+ - "\u00ca\u00bc\u0001\u0000\u0000\u0000\u00ca\u00c3\u0001\u0000\u0000\u0000"+ - "\u00cb\r\u0001\u0000\u0000\u0000\u00cc\u00d2\u0003\u0010\b\u0000\u00cd"+ - "\u00ce\u0003\u0010\b\u0000\u00ce\u00cf\u0003^/\u0000\u00cf\u00d0\u0003"+ - "\u0010\b\u0000\u00d0\u00d2\u0001\u0000\u0000\u0000\u00d1\u00cc\u0001\u0000"+ - "\u0000\u0000\u00d1\u00cd\u0001\u0000\u0000\u0000\u00d2\u000f\u0001\u0000"+ - "\u0000\u0000\u00d3\u00d4\u0006\b\uffff\uffff\u0000\u00d4\u00d8\u0003\u0012"+ - "\t\u0000\u00d5\u00d6\u0007\u0000\u0000\u0000\u00d6\u00d8\u0003\u0010\b"+ - "\u0003\u00d7\u00d3\u0001\u0000\u0000\u0000\u00d7\u00d5\u0001\u0000\u0000"+ - "\u0000\u00d8\u00e1\u0001\u0000\u0000\u0000\u00d9\u00da\n\u0002\u0000\u0000"+ - "\u00da\u00db\u0007\u0001\u0000\u0000\u00db\u00e0\u0003\u0010\b\u0003\u00dc"+ - "\u00dd\n\u0001\u0000\u0000\u00dd\u00de\u0007\u0000\u0000\u0000\u00de\u00e0"+ - "\u0003\u0010\b\u0002\u00df\u00d9\u0001\u0000\u0000\u0000\u00df\u00dc\u0001"+ - "\u0000\u0000\u0000\u00e0\u00e3\u0001\u0000\u0000\u0000\u00e1\u00df\u0001"+ - "\u0000\u0000\u0000\u00e1\u00e2\u0001\u0000\u0000\u0000\u00e2\u0011\u0001"+ - "\u0000\u0000\u0000\u00e3\u00e1\u0001\u0000\u0000\u0000\u00e4\u00e5\u0006"+ - "\t\uffff\uffff\u0000\u00e5\u00ed\u0003:\u001d\u0000\u00e6\u00ed\u0003"+ - "2\u0019\u0000\u00e7\u00ed\u0003\u0014\n\u0000\u00e8\u00e9\u0005)\u0000"+ - "\u0000\u00e9\u00ea\u0003\n\u0005\u0000\u00ea\u00eb\u00053\u0000\u0000"+ - "\u00eb\u00ed\u0001\u0000\u0000\u0000\u00ec\u00e4\u0001\u0000\u0000\u0000"+ - "\u00ec\u00e6\u0001\u0000\u0000\u0000\u00ec\u00e7\u0001\u0000\u0000\u0000"+ - "\u00ec\u00e8\u0001\u0000\u0000\u0000\u00ed\u00f3\u0001\u0000\u0000\u0000"+ - 
"\u00ee\u00ef\n\u0001\u0000\u0000\u00ef\u00f0\u0005\"\u0000\u0000\u00f0"+ - "\u00f2\u0003\u0016\u000b\u0000\u00f1\u00ee\u0001\u0000\u0000\u0000\u00f2"+ - "\u00f5\u0001\u0000\u0000\u0000\u00f3\u00f1\u0001\u0000\u0000\u0000\u00f3"+ - "\u00f4\u0001\u0000\u0000\u0000\u00f4\u0013\u0001\u0000\u0000\u0000\u00f5"+ - "\u00f3\u0001\u0000\u0000\u0000\u00f6\u00f7\u00036\u001b\u0000\u00f7\u0101"+ - "\u0005)\u0000\u0000\u00f8\u0102\u0005>\u0000\u0000\u00f9\u00fe\u0003\n"+ - "\u0005\u0000\u00fa\u00fb\u0005#\u0000\u0000\u00fb\u00fd\u0003\n\u0005"+ - "\u0000\u00fc\u00fa\u0001\u0000\u0000\u0000\u00fd\u0100\u0001\u0000\u0000"+ - "\u0000\u00fe\u00fc\u0001\u0000\u0000\u0000\u00fe\u00ff\u0001\u0000\u0000"+ - "\u0000\u00ff\u0102\u0001\u0000\u0000\u0000\u0100\u00fe\u0001\u0000\u0000"+ - "\u0000\u0101\u00f8\u0001\u0000\u0000\u0000\u0101\u00f9\u0001\u0000\u0000"+ - "\u0000\u0101\u0102\u0001\u0000\u0000\u0000\u0102\u0103\u0001\u0000\u0000"+ - "\u0000\u0103\u0104\u00053\u0000\u0000\u0104\u0015\u0001\u0000\u0000\u0000"+ - "\u0105\u0106\u00036\u001b\u0000\u0106\u0017\u0001\u0000\u0000\u0000\u0107"+ - "\u0108\u0005\u000e\u0000\u0000\u0108\u0109\u0003\u001a\r\u0000\u0109\u0019"+ - "\u0001\u0000\u0000\u0000\u010a\u010f\u0003\u001c\u000e\u0000\u010b\u010c"+ - "\u0005#\u0000\u0000\u010c\u010e\u0003\u001c\u000e\u0000\u010d\u010b\u0001"+ - "\u0000\u0000\u0000\u010e\u0111\u0001\u0000\u0000\u0000\u010f\u010d\u0001"+ - "\u0000\u0000\u0000\u010f\u0110\u0001\u0000\u0000\u0000\u0110\u001b\u0001"+ - "\u0000\u0000\u0000\u0111\u010f\u0001\u0000\u0000\u0000\u0112\u0118\u0003"+ - "\n\u0005\u0000\u0113\u0114\u00032\u0019\u0000\u0114\u0115\u0005!\u0000"+ - "\u0000\u0115\u0116\u0003\n\u0005\u0000\u0116\u0118\u0001\u0000\u0000\u0000"+ - "\u0117\u0112\u0001\u0000\u0000\u0000\u0117\u0113\u0001\u0000\u0000\u0000"+ - "\u0118\u001d\u0001\u0000\u0000\u0000\u0119\u011a\u0005\u0006\u0000\u0000"+ - "\u011a\u011f\u0003 \u0010\u0000\u011b\u011c\u0005#\u0000\u0000\u011c\u011e"+ - "\u0003 
\u0010\u0000\u011d\u011b\u0001\u0000\u0000\u0000\u011e\u0121\u0001"+ - "\u0000\u0000\u0000\u011f\u011d\u0001\u0000\u0000\u0000\u011f\u0120\u0001"+ - "\u0000\u0000\u0000\u0120\u0123\u0001\u0000\u0000\u0000\u0121\u011f\u0001"+ - "\u0000\u0000\u0000\u0122\u0124\u0003&\u0013\u0000\u0123\u0122\u0001\u0000"+ - "\u0000\u0000\u0123\u0124\u0001\u0000\u0000\u0000\u0124\u0126\u0001\u0000"+ - "\u0000\u0000\u0125\u0127\u0003\"\u0011\u0000\u0126\u0125\u0001\u0000\u0000"+ - "\u0000\u0126\u0127\u0001\u0000\u0000\u0000\u0127\u001f\u0001\u0000\u0000"+ - "\u0000\u0128\u0129\u0005J\u0000\u0000\u0129!\u0001\u0000\u0000\u0000\u012a"+ - "\u012b\u0005H\u0000\u0000\u012b\u0130\u0003$\u0012\u0000\u012c\u012d\u0005"+ - "#\u0000\u0000\u012d\u012f\u0003$\u0012\u0000\u012e\u012c\u0001\u0000\u0000"+ - "\u0000\u012f\u0132\u0001\u0000\u0000\u0000\u0130\u012e\u0001\u0000\u0000"+ - "\u0000\u0130\u0131\u0001\u0000\u0000\u0000\u0131#\u0001\u0000\u0000\u0000"+ - "\u0132\u0130\u0001\u0000\u0000\u0000\u0133\u0134\u0003\\.\u0000\u0134"+ - "\u0135\u0005!\u0000\u0000\u0135\u0136\u0003\\.\u0000\u0136%\u0001\u0000"+ - "\u0000\u0000\u0137\u013a\u0003(\u0014\u0000\u0138\u013a\u0003*\u0015\u0000"+ - "\u0139\u0137\u0001\u0000\u0000\u0000\u0139\u0138\u0001\u0000\u0000\u0000"+ - "\u013a\'\u0001\u0000\u0000\u0000\u013b\u013c\u0005I\u0000\u0000\u013c"+ - "\u0141\u0003 \u0010\u0000\u013d\u013e\u0005#\u0000\u0000\u013e\u0140\u0003"+ - " \u0010\u0000\u013f\u013d\u0001\u0000\u0000\u0000\u0140\u0143\u0001\u0000"+ - "\u0000\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0141\u0142\u0001\u0000"+ - "\u0000\u0000\u0142)\u0001\u0000\u0000\u0000\u0143\u0141\u0001\u0000\u0000"+ - "\u0000\u0144\u0145\u0005A\u0000\u0000\u0145\u0146\u0003(\u0014\u0000\u0146"+ - "\u0147\u0005B\u0000\u0000\u0147+\u0001\u0000\u0000\u0000\u0148\u0149\u0005"+ - "\u0004\u0000\u0000\u0149\u014a\u0003\u001a\r\u0000\u014a-\u0001\u0000"+ - "\u0000\u0000\u014b\u014d\u0005\u0011\u0000\u0000\u014c\u014e\u0003\u001a"+ - 
"\r\u0000\u014d\u014c\u0001\u0000\u0000\u0000\u014d\u014e\u0001\u0000\u0000"+ - "\u0000\u014e\u0151\u0001\u0000\u0000\u0000\u014f\u0150\u0005\u001e\u0000"+ - "\u0000\u0150\u0152\u0003\u001a\r\u0000\u0151\u014f\u0001\u0000\u0000\u0000"+ - "\u0151\u0152\u0001\u0000\u0000\u0000\u0152/\u0001\u0000\u0000\u0000\u0153"+ - "\u0154\u0005\b\u0000\u0000\u0154\u0157\u0003\u001a\r\u0000\u0155\u0156"+ - "\u0005\u001e\u0000\u0000\u0156\u0158\u0003\u001a\r\u0000\u0157\u0155\u0001"+ - "\u0000\u0000\u0000\u0157\u0158\u0001\u0000\u0000\u0000\u01581\u0001\u0000"+ - "\u0000\u0000\u0159\u015e\u00036\u001b\u0000\u015a\u015b\u0005%\u0000\u0000"+ - "\u015b\u015d\u00036\u001b\u0000\u015c\u015a\u0001\u0000\u0000\u0000\u015d"+ - "\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000\u015e"+ - "\u015f\u0001\u0000\u0000\u0000\u015f3\u0001\u0000\u0000\u0000\u0160\u015e"+ - "\u0001\u0000\u0000\u0000\u0161\u0166\u00038\u001c\u0000\u0162\u0163\u0005"+ - "%\u0000\u0000\u0163\u0165\u00038\u001c\u0000\u0164\u0162\u0001\u0000\u0000"+ - "\u0000\u0165\u0168\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000\u0000"+ - "\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u01675\u0001\u0000\u0000\u0000"+ - "\u0168\u0166\u0001\u0000\u0000\u0000\u0169\u016a\u0007\u0002\u0000\u0000"+ - "\u016a7\u0001\u0000\u0000\u0000\u016b\u016c\u0005N\u0000\u0000\u016c9"+ - "\u0001\u0000\u0000\u0000\u016d\u0198\u0005.\u0000\u0000\u016e\u016f\u0003"+ - "Z-\u0000\u016f\u0170\u0005C\u0000\u0000\u0170\u0198\u0001\u0000\u0000"+ - "\u0000\u0171\u0198\u0003X,\u0000\u0172\u0198\u0003Z-\u0000\u0173\u0198"+ - "\u0003T*\u0000\u0174\u0198\u00051\u0000\u0000\u0175\u0198\u0003\\.\u0000"+ - "\u0176\u0177\u0005A\u0000\u0000\u0177\u017c\u0003V+\u0000\u0178\u0179"+ - "\u0005#\u0000\u0000\u0179\u017b\u0003V+\u0000\u017a\u0178\u0001\u0000"+ - "\u0000\u0000\u017b\u017e\u0001\u0000\u0000\u0000\u017c\u017a\u0001\u0000"+ - "\u0000\u0000\u017c\u017d\u0001\u0000\u0000\u0000\u017d\u017f\u0001\u0000"+ - 
"\u0000\u0000\u017e\u017c\u0001\u0000\u0000\u0000\u017f\u0180\u0005B\u0000"+ - "\u0000\u0180\u0198\u0001\u0000\u0000\u0000\u0181\u0182\u0005A\u0000\u0000"+ - "\u0182\u0187\u0003T*\u0000\u0183\u0184\u0005#\u0000\u0000\u0184\u0186"+ - "\u0003T*\u0000\u0185\u0183\u0001\u0000\u0000\u0000\u0186\u0189\u0001\u0000"+ - "\u0000\u0000\u0187\u0185\u0001\u0000\u0000\u0000\u0187\u0188\u0001\u0000"+ - "\u0000\u0000\u0188\u018a\u0001\u0000\u0000\u0000\u0189\u0187\u0001\u0000"+ - "\u0000\u0000\u018a\u018b\u0005B\u0000\u0000\u018b\u0198\u0001\u0000\u0000"+ - "\u0000\u018c\u018d\u0005A\u0000\u0000\u018d\u0192\u0003\\.\u0000\u018e"+ - "\u018f\u0005#\u0000\u0000\u018f\u0191\u0003\\.\u0000\u0190\u018e\u0001"+ - "\u0000\u0000\u0000\u0191\u0194\u0001\u0000\u0000\u0000\u0192\u0190\u0001"+ - "\u0000\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000\u0193\u0195\u0001"+ - "\u0000\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u0196\u0005"+ - "B\u0000\u0000\u0196\u0198\u0001\u0000\u0000\u0000\u0197\u016d\u0001\u0000"+ - "\u0000\u0000\u0197\u016e\u0001\u0000\u0000\u0000\u0197\u0171\u0001\u0000"+ - "\u0000\u0000\u0197\u0172\u0001\u0000\u0000\u0000\u0197\u0173\u0001\u0000"+ - "\u0000\u0000\u0197\u0174\u0001\u0000\u0000\u0000\u0197\u0175\u0001\u0000"+ - "\u0000\u0000\u0197\u0176\u0001\u0000\u0000\u0000\u0197\u0181\u0001\u0000"+ - "\u0000\u0000\u0197\u018c\u0001\u0000\u0000\u0000\u0198;\u0001\u0000\u0000"+ - "\u0000\u0199\u019a\u0005\n\u0000\u0000\u019a\u019b\u0005\u001c\u0000\u0000"+ - "\u019b=\u0001\u0000\u0000\u0000\u019c\u019d\u0005\u0010\u0000\u0000\u019d"+ - "\u01a2\u0003@ \u0000\u019e\u019f\u0005#\u0000\u0000\u019f\u01a1\u0003"+ - "@ \u0000\u01a0\u019e\u0001\u0000\u0000\u0000\u01a1\u01a4\u0001\u0000\u0000"+ - "\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000\u01a2\u01a3\u0001\u0000\u0000"+ - "\u0000\u01a3?\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000"+ - "\u01a5\u01a7\u0003\n\u0005\u0000\u01a6\u01a8\u0007\u0003\u0000\u0000\u01a7"+ - 
"\u01a6\u0001\u0000\u0000\u0000\u01a7\u01a8\u0001\u0000\u0000\u0000\u01a8"+ - "\u01ab\u0001\u0000\u0000\u0000\u01a9\u01aa\u0005/\u0000\u0000\u01aa\u01ac"+ - "\u0007\u0004\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ab\u01ac"+ - "\u0001\u0000\u0000\u0000\u01acA\u0001\u0000\u0000\u0000\u01ad\u01ae\u0005"+ - "\t\u0000\u0000\u01ae\u01b3\u00034\u001a\u0000\u01af\u01b0\u0005#\u0000"+ - "\u0000\u01b0\u01b2\u00034\u001a\u0000\u01b1\u01af\u0001\u0000\u0000\u0000"+ - "\u01b2\u01b5\u0001\u0000\u0000\u0000\u01b3\u01b1\u0001\u0000\u0000\u0000"+ - "\u01b3\u01b4\u0001\u0000\u0000\u0000\u01b4C\u0001\u0000\u0000\u0000\u01b5"+ - "\u01b3\u0001\u0000\u0000\u0000\u01b6\u01b7\u0005\u0002\u0000\u0000\u01b7"+ - "\u01bc\u00034\u001a\u0000\u01b8\u01b9\u0005#\u0000\u0000\u01b9\u01bb\u0003"+ - "4\u001a\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bb\u01be\u0001\u0000"+ - "\u0000\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bc\u01bd\u0001\u0000"+ - "\u0000\u0000\u01bdE\u0001\u0000\u0000\u0000\u01be\u01bc\u0001\u0000\u0000"+ - "\u0000\u01bf\u01c0\u0005\r\u0000\u0000\u01c0\u01c5\u0003H$\u0000\u01c1"+ - "\u01c2\u0005#\u0000\u0000\u01c2\u01c4\u0003H$\u0000\u01c3\u01c1\u0001"+ - "\u0000\u0000\u0000\u01c4\u01c7\u0001\u0000\u0000\u0000\u01c5\u01c3\u0001"+ - "\u0000\u0000\u0000\u01c5\u01c6\u0001\u0000\u0000\u0000\u01c6G\u0001\u0000"+ - "\u0000\u0000\u01c7\u01c5\u0001\u0000\u0000\u0000\u01c8\u01c9\u00034\u001a"+ - "\u0000\u01c9\u01ca\u0005R\u0000\u0000\u01ca\u01cb\u00034\u001a\u0000\u01cb"+ - "I\u0001\u0000\u0000\u0000\u01cc\u01cd\u0005\u0001\u0000\u0000\u01cd\u01ce"+ - "\u0003\u0012\t\u0000\u01ce\u01d0\u0003\\.\u0000\u01cf\u01d1\u0003P(\u0000"+ - "\u01d0\u01cf\u0001\u0000\u0000\u0000\u01d0\u01d1\u0001\u0000\u0000\u0000"+ - "\u01d1K\u0001\u0000\u0000\u0000\u01d2\u01d3\u0005\u0007\u0000\u0000\u01d3"+ - "\u01d4\u0003\u0012\t\u0000\u01d4\u01d5\u0003\\.\u0000\u01d5M\u0001\u0000"+ - "\u0000\u0000\u01d6\u01d7\u0005\f\u0000\u0000\u01d7\u01d8\u00032\u0019"+ - 
"\u0000\u01d8O\u0001\u0000\u0000\u0000\u01d9\u01de\u0003R)\u0000\u01da"+ - "\u01db\u0005#\u0000\u0000\u01db\u01dd\u0003R)\u0000\u01dc\u01da\u0001"+ - "\u0000\u0000\u0000\u01dd\u01e0\u0001\u0000\u0000\u0000\u01de\u01dc\u0001"+ - "\u0000\u0000\u0000\u01de\u01df\u0001\u0000\u0000\u0000\u01dfQ\u0001\u0000"+ - "\u0000\u0000\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e2\u00036\u001b"+ - "\u0000\u01e2\u01e3\u0005!\u0000\u0000\u01e3\u01e4\u0003:\u001d\u0000\u01e4"+ - "S\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007\u0005\u0000\u0000\u01e6U\u0001"+ - "\u0000\u0000\u0000\u01e7\u01ea\u0003X,\u0000\u01e8\u01ea\u0003Z-\u0000"+ - "\u01e9\u01e7\u0001\u0000\u0000\u0000\u01e9\u01e8\u0001\u0000\u0000\u0000"+ - "\u01eaW\u0001\u0000\u0000\u0000\u01eb\u01ed\u0007\u0000\u0000\u0000\u01ec"+ - "\u01eb\u0001\u0000\u0000\u0000\u01ec\u01ed\u0001\u0000\u0000\u0000\u01ed"+ - "\u01ee\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005\u001d\u0000\u0000\u01ef"+ - "Y\u0001\u0000\u0000\u0000\u01f0\u01f2\u0007\u0000\u0000\u0000\u01f1\u01f0"+ - "\u0001\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2\u01f3"+ - "\u0001\u0000\u0000\u0000\u01f3\u01f4\u0005\u001c\u0000\u0000\u01f4[\u0001"+ - "\u0000\u0000\u0000\u01f5\u01f6\u0005\u001b\u0000\u0000\u01f6]\u0001\u0000"+ - "\u0000\u0000\u01f7\u01f8\u0007\u0006\u0000\u0000\u01f8_\u0001\u0000\u0000"+ - "\u0000\u01f9\u01fa\u0005\u0005\u0000\u0000\u01fa\u01fb\u0003b1\u0000\u01fb"+ - "a\u0001\u0000\u0000\u0000\u01fc\u01fd\u0005A\u0000\u0000\u01fd\u01fe\u0003"+ - "\u0002\u0001\u0000\u01fe\u01ff\u0005B\u0000\u0000\u01ffc\u0001\u0000\u0000"+ - "\u0000\u0200\u0201\u0005\u000f\u0000\u0000\u0201\u0202\u0005b\u0000\u0000"+ - "\u0202e\u0001\u0000\u0000\u0000\u0203\u0204\u0005\u000b\u0000\u0000\u0204"+ - "\u0205\u0005f\u0000\u0000\u0205g\u0001\u0000\u0000\u0000\u0206\u0207\u0005"+ - "\u0003\u0000\u0000\u0207\u020a\u0005X\u0000\u0000\u0208\u0209\u0005V\u0000"+ - "\u0000\u0209\u020b\u00034\u001a\u0000\u020a\u0208\u0001\u0000\u0000\u0000"+ - 
"\u020a\u020b\u0001\u0000\u0000\u0000\u020b\u0215\u0001\u0000\u0000\u0000"+ - "\u020c\u020d\u0005W\u0000\u0000\u020d\u0212\u0003j5\u0000\u020e\u020f"+ - "\u0005#\u0000\u0000\u020f\u0211\u0003j5\u0000\u0210\u020e\u0001\u0000"+ - "\u0000\u0000\u0211\u0214\u0001\u0000\u0000\u0000\u0212\u0210\u0001\u0000"+ - "\u0000\u0000\u0212\u0213\u0001\u0000\u0000\u0000\u0213\u0216\u0001\u0000"+ - "\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000\u0215\u020c\u0001\u0000"+ - "\u0000\u0000\u0215\u0216\u0001\u0000\u0000\u0000\u0216i\u0001\u0000\u0000"+ - "\u0000\u0217\u0218\u00034\u001a\u0000\u0218\u0219\u0005!\u0000\u0000\u0219"+ - "\u021b\u0001\u0000\u0000\u0000\u021a\u0217\u0001\u0000\u0000\u0000\u021a"+ - "\u021b\u0001\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c"+ - "\u021d\u00034\u001a\u0000\u021dk\u0001\u0000\u0000\u00004w\u007f\u008e"+ - "\u009a\u00a3\u00ab\u00af\u00b7\u00b9\u00be\u00c5\u00ca\u00d1\u00d7\u00df"+ - "\u00e1\u00ec\u00f3\u00fe\u0101\u010f\u0117\u011f\u0123\u0126\u0130\u0139"+ - "\u0141\u014d\u0151\u0157\u015e\u0166\u017c\u0187\u0192\u0197\u01a2\u01a7"+ - "\u01ab\u01b3\u01bc\u01c5\u01d0\u01de\u01e9\u01ec\u01f1\u020a\u0212\u0215"+ - "\u021a"; + "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0005\u0001x\b\u0001\n\u0001\f\u0001{\t"+ + "\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0003\u0002\u0083\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u0092\b\u0003\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u009e\b\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00a5"+ + 
"\b\u0005\n\u0005\f\u0005\u00a8\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0003\u0005\u00af\b\u0005\u0001\u0005\u0001\u0005"+ + "\u0003\u0005\u00b3\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0005\u0005\u00bb\b\u0005\n\u0005\f\u0005\u00be"+ + "\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00c2\b\u0006\u0001\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00c9\b\u0006"+ + "\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00ce\b\u0006\u0001\u0007"+ + "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00d5\b\u0007"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00db\b\b\u0001\b\u0001\b\u0001"+ + "\b\u0001\b\u0001\b\u0001\b\u0005\b\u00e3\b\b\n\b\f\b\u00e6\t\b\u0001\t"+ + "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00f0"+ + "\b\t\u0001\t\u0001\t\u0001\t\u0005\t\u00f5\b\t\n\t\f\t\u00f8\t\t\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0005\n\u0100\b\n\n\n\f\n\u0103"+ + "\t\n\u0003\n\u0105\b\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\f"+ + "\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0005\r\u0111\b\r\n\r\f\r\u0114"+ + "\t\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0003"+ + "\u000e\u011b\b\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0005"+ + "\u000f\u0121\b\u000f\n\u000f\f\u000f\u0124\t\u000f\u0001\u000f\u0003\u000f"+ + "\u0127\b\u000f\u0001\u000f\u0003\u000f\u012a\b\u000f\u0001\u0010\u0001"+ + "\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0005\u0011\u0132"+ + "\b\u0011\n\u0011\f\u0011\u0135\t\u0011\u0001\u0012\u0001\u0012\u0001\u0012"+ + "\u0001\u0012\u0001\u0013\u0001\u0013\u0003\u0013\u013d\b\u0013\u0001\u0014"+ + "\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014\u0143\b\u0014\n\u0014"+ + "\f\u0014\u0146\t\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u0150\b\u0016"+ + 
"\n\u0016\f\u0016\u0153\t\u0016\u0001\u0016\u0003\u0016\u0156\b\u0016\u0001"+ + "\u0016\u0001\u0016\u0003\u0016\u015a\b\u0016\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0018\u0001\u0018\u0003\u0018\u0161\b\u0018\u0001\u0018\u0001"+ + "\u0018\u0003\u0018\u0165\b\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0003\u0019\u016b\b\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0005"+ + "\u001a\u0170\b\u001a\n\u001a\f\u001a\u0173\t\u001a\u0001\u001b\u0001\u001b"+ + "\u0001\u001b\u0005\u001b\u0178\b\u001b\n\u001b\f\u001b\u017b\t\u001b\u0001"+ + "\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u018e"+ + "\b\u001e\n\u001e\f\u001e\u0191\t\u001e\u0001\u001e\u0001\u001e\u0001\u001e"+ + "\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u0199\b\u001e\n\u001e"+ + "\f\u001e\u019c\t\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e"+ + "\u0001\u001e\u0001\u001e\u0005\u001e\u01a4\b\u001e\n\u001e\f\u001e\u01a7"+ + "\t\u001e\u0001\u001e\u0001\u001e\u0003\u001e\u01ab\b\u001e\u0001\u001f"+ + "\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001 \u0001 \u0005 \u01b4\b \n"+ + " \f \u01b7\t \u0001!\u0001!\u0003!\u01bb\b!\u0001!\u0001!\u0003!\u01bf"+ + "\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c5\b\"\n\"\f\"\u01c8\t"+ + "\"\u0001#\u0001#\u0001#\u0001#\u0005#\u01ce\b#\n#\f#\u01d1\t#\u0001$\u0001"+ + "$\u0001$\u0001$\u0005$\u01d7\b$\n$\f$\u01da\t$\u0001%\u0001%\u0001%\u0001"+ + "%\u0001&\u0001&\u0001&\u0001&\u0003&\u01e4\b&\u0001\'\u0001\'\u0001\'"+ + "\u0001\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0005)\u01f0\b)\n)"+ + "\f)\u01f3\t)\u0001*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001,\u0001,\u0003"+ + ",\u01fd\b,\u0001-\u0003-\u0200\b-\u0001-\u0001-\u0001.\u0003.\u0205\b"+ + ".\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00011\u00011\u00011\u0001"+ + 
"2\u00012\u00012\u00012\u00013\u00013\u00013\u00014\u00014\u00014\u0001"+ + "5\u00015\u00015\u00015\u00035\u021e\b5\u00015\u00015\u00015\u00015\u0005"+ + "5\u0224\b5\n5\f5\u0227\t5\u00035\u0229\b5\u00016\u00016\u00016\u00036"+ + "\u022e\b6\u00016\u00016\u00016\u0000\u0004\u0002\n\u0010\u00127\u0000"+ + "\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c"+ + "\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjl\u0000\u0007\u0001\u0000"+ + ">?\u0001\u0000@B\u0001\u0000EF\u0002\u0000\"\"&&\u0001\u0000)*\u0002\u0000"+ + "((66\u0002\u0000779=\u024e\u0000n\u0001\u0000\u0000\u0000\u0002q\u0001"+ + "\u0000\u0000\u0000\u0004\u0082\u0001\u0000\u0000\u0000\u0006\u0091\u0001"+ + "\u0000\u0000\u0000\b\u0093\u0001\u0000\u0000\u0000\n\u00b2\u0001\u0000"+ + "\u0000\u0000\f\u00cd\u0001\u0000\u0000\u0000\u000e\u00d4\u0001\u0000\u0000"+ + "\u0000\u0010\u00da\u0001\u0000\u0000\u0000\u0012\u00ef\u0001\u0000\u0000"+ + "\u0000\u0014\u00f9\u0001\u0000\u0000\u0000\u0016\u0108\u0001\u0000\u0000"+ + "\u0000\u0018\u010a\u0001\u0000\u0000\u0000\u001a\u010d\u0001\u0000\u0000"+ + "\u0000\u001c\u011a\u0001\u0000\u0000\u0000\u001e\u011c\u0001\u0000\u0000"+ + "\u0000 \u012b\u0001\u0000\u0000\u0000\"\u012d\u0001\u0000\u0000\u0000"+ + "$\u0136\u0001\u0000\u0000\u0000&\u013c\u0001\u0000\u0000\u0000(\u013e"+ + "\u0001\u0000\u0000\u0000*\u0147\u0001\u0000\u0000\u0000,\u014b\u0001\u0000"+ + "\u0000\u0000.\u015b\u0001\u0000\u0000\u00000\u015e\u0001\u0000\u0000\u0000"+ + "2\u0166\u0001\u0000\u0000\u00004\u016c\u0001\u0000\u0000\u00006\u0174"+ + "\u0001\u0000\u0000\u00008\u017c\u0001\u0000\u0000\u0000:\u017e\u0001\u0000"+ + "\u0000\u0000<\u01aa\u0001\u0000\u0000\u0000>\u01ac\u0001\u0000\u0000\u0000"+ + "@\u01af\u0001\u0000\u0000\u0000B\u01b8\u0001\u0000\u0000\u0000D\u01c0"+ + "\u0001\u0000\u0000\u0000F\u01c9\u0001\u0000\u0000\u0000H\u01d2\u0001\u0000"+ + "\u0000\u0000J\u01db\u0001\u0000\u0000\u0000L\u01df\u0001\u0000\u0000\u0000"+ + 
"N\u01e5\u0001\u0000\u0000\u0000P\u01e9\u0001\u0000\u0000\u0000R\u01ec"+ + "\u0001\u0000\u0000\u0000T\u01f4\u0001\u0000\u0000\u0000V\u01f8\u0001\u0000"+ + "\u0000\u0000X\u01fc\u0001\u0000\u0000\u0000Z\u01ff\u0001\u0000\u0000\u0000"+ + "\\\u0204\u0001\u0000\u0000\u0000^\u0208\u0001\u0000\u0000\u0000`\u020a"+ + "\u0001\u0000\u0000\u0000b\u020c\u0001\u0000\u0000\u0000d\u020f\u0001\u0000"+ + "\u0000\u0000f\u0213\u0001\u0000\u0000\u0000h\u0216\u0001\u0000\u0000\u0000"+ + "j\u0219\u0001\u0000\u0000\u0000l\u022d\u0001\u0000\u0000\u0000no\u0003"+ + "\u0002\u0001\u0000op\u0005\u0000\u0000\u0001p\u0001\u0001\u0000\u0000"+ + "\u0000qr\u0006\u0001\uffff\uffff\u0000rs\u0003\u0004\u0002\u0000sy\u0001"+ + "\u0000\u0000\u0000tu\n\u0001\u0000\u0000uv\u0005\u001c\u0000\u0000vx\u0003"+ + "\u0006\u0003\u0000wt\u0001\u0000\u0000\u0000x{\u0001\u0000\u0000\u0000"+ + "yw\u0001\u0000\u0000\u0000yz\u0001\u0000\u0000\u0000z\u0003\u0001\u0000"+ + "\u0000\u0000{y\u0001\u0000\u0000\u0000|\u0083\u0003b1\u0000}\u0083\u0003"+ + "\u001e\u000f\u0000~\u0083\u0003\u0018\f\u0000\u007f\u0083\u0003,\u0016"+ + "\u0000\u0080\u0083\u0003f3\u0000\u0081\u0083\u0003h4\u0000\u0082|\u0001"+ + "\u0000\u0000\u0000\u0082}\u0001\u0000\u0000\u0000\u0082~\u0001\u0000\u0000"+ + "\u0000\u0082\u007f\u0001\u0000\u0000\u0000\u0082\u0080\u0001\u0000\u0000"+ + "\u0000\u0082\u0081\u0001\u0000\u0000\u0000\u0083\u0005\u0001\u0000\u0000"+ + "\u0000\u0084\u0092\u0003.\u0017\u0000\u0085\u0092\u00032\u0019\u0000\u0086"+ + "\u0092\u0003>\u001f\u0000\u0087\u0092\u0003D\"\u0000\u0088\u0092\u0003"+ + "@ \u0000\u0089\u0092\u00030\u0018\u0000\u008a\u0092\u0003\b\u0004\u0000"+ + "\u008b\u0092\u0003F#\u0000\u008c\u0092\u0003H$\u0000\u008d\u0092\u0003"+ + "L&\u0000\u008e\u0092\u0003N\'\u0000\u008f\u0092\u0003j5\u0000\u0090\u0092"+ + "\u0003P(\u0000\u0091\u0084\u0001\u0000\u0000\u0000\u0091\u0085\u0001\u0000"+ + "\u0000\u0000\u0091\u0086\u0001\u0000\u0000\u0000\u0091\u0087\u0001\u0000"+ + 
"\u0000\u0000\u0091\u0088\u0001\u0000\u0000\u0000\u0091\u0089\u0001\u0000"+ + "\u0000\u0000\u0091\u008a\u0001\u0000\u0000\u0000\u0091\u008b\u0001\u0000"+ + "\u0000\u0000\u0091\u008c\u0001\u0000\u0000\u0000\u0091\u008d\u0001\u0000"+ + "\u0000\u0000\u0091\u008e\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000"+ + "\u0000\u0000\u0091\u0090\u0001\u0000\u0000\u0000\u0092\u0007\u0001\u0000"+ + "\u0000\u0000\u0093\u0094\u0005\u0013\u0000\u0000\u0094\u0095\u0003\n\u0005"+ + "\u0000\u0095\t\u0001\u0000\u0000\u0000\u0096\u0097\u0006\u0005\uffff\uffff"+ + "\u0000\u0097\u0098\u0005/\u0000\u0000\u0098\u00b3\u0003\n\u0005\u0007"+ + "\u0099\u00b3\u0003\u000e\u0007\u0000\u009a\u00b3\u0003\f\u0006\u0000\u009b"+ + "\u009d\u0003\u000e\u0007\u0000\u009c\u009e\u0005/\u0000\u0000\u009d\u009c"+ + "\u0001\u0000\u0000\u0000\u009d\u009e\u0001\u0000\u0000\u0000\u009e\u009f"+ + "\u0001\u0000\u0000\u0000\u009f\u00a0\u0005,\u0000\u0000\u00a0\u00a1\u0005"+ + "+\u0000\u0000\u00a1\u00a6\u0003\u000e\u0007\u0000\u00a2\u00a3\u0005%\u0000"+ + "\u0000\u00a3\u00a5\u0003\u000e\u0007\u0000\u00a4\u00a2\u0001\u0000\u0000"+ + "\u0000\u00a5\u00a8\u0001\u0000\u0000\u0000\u00a6\u00a4\u0001\u0000\u0000"+ + "\u0000\u00a6\u00a7\u0001\u0000\u0000\u0000\u00a7\u00a9\u0001\u0000\u0000"+ + "\u0000\u00a8\u00a6\u0001\u0000\u0000\u0000\u00a9\u00aa\u00055\u0000\u0000"+ + "\u00aa\u00b3\u0001\u0000\u0000\u0000\u00ab\u00ac\u0003\u000e\u0007\u0000"+ + "\u00ac\u00ae\u0005-\u0000\u0000\u00ad\u00af\u0005/\u0000\u0000\u00ae\u00ad"+ + "\u0001\u0000\u0000\u0000\u00ae\u00af\u0001\u0000\u0000\u0000\u00af\u00b0"+ + "\u0001\u0000\u0000\u0000\u00b0\u00b1\u00050\u0000\u0000\u00b1\u00b3\u0001"+ + "\u0000\u0000\u0000\u00b2\u0096\u0001\u0000\u0000\u0000\u00b2\u0099\u0001"+ + "\u0000\u0000\u0000\u00b2\u009a\u0001\u0000\u0000\u0000\u00b2\u009b\u0001"+ + "\u0000\u0000\u0000\u00b2\u00ab\u0001\u0000\u0000\u0000\u00b3\u00bc\u0001"+ + "\u0000\u0000\u0000\u00b4\u00b5\n\u0004\u0000\u0000\u00b5\u00b6\u0005!"+ + 
"\u0000\u0000\u00b6\u00bb\u0003\n\u0005\u0005\u00b7\u00b8\n\u0003\u0000"+ + "\u0000\u00b8\u00b9\u00052\u0000\u0000\u00b9\u00bb\u0003\n\u0005\u0004"+ + "\u00ba\u00b4\u0001\u0000\u0000\u0000\u00ba\u00b7\u0001\u0000\u0000\u0000"+ + "\u00bb\u00be\u0001\u0000\u0000\u0000\u00bc\u00ba\u0001\u0000\u0000\u0000"+ + "\u00bc\u00bd\u0001\u0000\u0000\u0000\u00bd\u000b\u0001\u0000\u0000\u0000"+ + "\u00be\u00bc\u0001\u0000\u0000\u0000\u00bf\u00c1\u0003\u000e\u0007\u0000"+ + "\u00c0\u00c2\u0005/\u0000\u0000\u00c1\u00c0\u0001\u0000\u0000\u0000\u00c1"+ + "\u00c2\u0001\u0000\u0000\u0000\u00c2\u00c3\u0001\u0000\u0000\u0000\u00c3"+ + "\u00c4\u0005.\u0000\u0000\u00c4\u00c5\u0003^/\u0000\u00c5\u00ce\u0001"+ + "\u0000\u0000\u0000\u00c6\u00c8\u0003\u000e\u0007\u0000\u00c7\u00c9\u0005"+ + "/\u0000\u0000\u00c8\u00c7\u0001\u0000\u0000\u0000\u00c8\u00c9\u0001\u0000"+ + "\u0000\u0000\u00c9\u00ca\u0001\u0000\u0000\u0000\u00ca\u00cb\u00054\u0000"+ + "\u0000\u00cb\u00cc\u0003^/\u0000\u00cc\u00ce\u0001\u0000\u0000\u0000\u00cd"+ + "\u00bf\u0001\u0000\u0000\u0000\u00cd\u00c6\u0001\u0000\u0000\u0000\u00ce"+ + "\r\u0001\u0000\u0000\u0000\u00cf\u00d5\u0003\u0010\b\u0000\u00d0\u00d1"+ + "\u0003\u0010\b\u0000\u00d1\u00d2\u0003`0\u0000\u00d2\u00d3\u0003\u0010"+ + "\b\u0000\u00d3\u00d5\u0001\u0000\u0000\u0000\u00d4\u00cf\u0001\u0000\u0000"+ + "\u0000\u00d4\u00d0\u0001\u0000\u0000\u0000\u00d5\u000f\u0001\u0000\u0000"+ + "\u0000\u00d6\u00d7\u0006\b\uffff\uffff\u0000\u00d7\u00db\u0003\u0012\t"+ + "\u0000\u00d8\u00d9\u0007\u0000\u0000\u0000\u00d9\u00db\u0003\u0010\b\u0003"+ + "\u00da\u00d6\u0001\u0000\u0000\u0000\u00da\u00d8\u0001\u0000\u0000\u0000"+ + "\u00db\u00e4\u0001\u0000\u0000\u0000\u00dc\u00dd\n\u0002\u0000\u0000\u00dd"+ + "\u00de\u0007\u0001\u0000\u0000\u00de\u00e3\u0003\u0010\b\u0003\u00df\u00e0"+ + "\n\u0001\u0000\u0000\u00e0\u00e1\u0007\u0000\u0000\u0000\u00e1\u00e3\u0003"+ + "\u0010\b\u0002\u00e2\u00dc\u0001\u0000\u0000\u0000\u00e2\u00df\u0001\u0000"+ + 
"\u0000\u0000\u00e3\u00e6\u0001\u0000\u0000\u0000\u00e4\u00e2\u0001\u0000"+ + "\u0000\u0000\u00e4\u00e5\u0001\u0000\u0000\u0000\u00e5\u0011\u0001\u0000"+ + "\u0000\u0000\u00e6\u00e4\u0001\u0000\u0000\u0000\u00e7\u00e8\u0006\t\uffff"+ + "\uffff\u0000\u00e8\u00f0\u0003<\u001e\u0000\u00e9\u00f0\u00034\u001a\u0000"+ + "\u00ea\u00f0\u0003\u0014\n\u0000\u00eb\u00ec\u0005+\u0000\u0000\u00ec"+ + "\u00ed\u0003\n\u0005\u0000\u00ed\u00ee\u00055\u0000\u0000\u00ee\u00f0"+ + "\u0001\u0000\u0000\u0000\u00ef\u00e7\u0001\u0000\u0000\u0000\u00ef\u00e9"+ + "\u0001\u0000\u0000\u0000\u00ef\u00ea\u0001\u0000\u0000\u0000\u00ef\u00eb"+ + "\u0001\u0000\u0000\u0000\u00f0\u00f6\u0001\u0000\u0000\u0000\u00f1\u00f2"+ + "\n\u0001\u0000\u0000\u00f2\u00f3\u0005$\u0000\u0000\u00f3\u00f5\u0003"+ + "\u0016\u000b\u0000\u00f4\u00f1\u0001\u0000\u0000\u0000\u00f5\u00f8\u0001"+ + "\u0000\u0000\u0000\u00f6\u00f4\u0001\u0000\u0000\u0000\u00f6\u00f7\u0001"+ + "\u0000\u0000\u0000\u00f7\u0013\u0001\u0000\u0000\u0000\u00f8\u00f6\u0001"+ + "\u0000\u0000\u0000\u00f9\u00fa\u00038\u001c\u0000\u00fa\u0104\u0005+\u0000"+ + "\u0000\u00fb\u0105\u0005@\u0000\u0000\u00fc\u0101\u0003\n\u0005\u0000"+ + "\u00fd\u00fe\u0005%\u0000\u0000\u00fe\u0100\u0003\n\u0005\u0000\u00ff"+ + "\u00fd\u0001\u0000\u0000\u0000\u0100\u0103\u0001\u0000\u0000\u0000\u0101"+ + "\u00ff\u0001\u0000\u0000\u0000\u0101\u0102\u0001\u0000\u0000\u0000\u0102"+ + "\u0105\u0001\u0000\u0000\u0000\u0103\u0101\u0001\u0000\u0000\u0000\u0104"+ + "\u00fb\u0001\u0000\u0000\u0000\u0104\u00fc\u0001\u0000\u0000\u0000\u0104"+ + "\u0105\u0001\u0000\u0000\u0000\u0105\u0106\u0001\u0000\u0000\u0000\u0106"+ + "\u0107\u00055\u0000\u0000\u0107\u0015\u0001\u0000\u0000\u0000\u0108\u0109"+ + "\u00038\u001c\u0000\u0109\u0017\u0001\u0000\u0000\u0000\u010a\u010b\u0005"+ + "\u000f\u0000\u0000\u010b\u010c\u0003\u001a\r\u0000\u010c\u0019\u0001\u0000"+ + "\u0000\u0000\u010d\u0112\u0003\u001c\u000e\u0000\u010e\u010f\u0005%\u0000"+ + 
"\u0000\u010f\u0111\u0003\u001c\u000e\u0000\u0110\u010e\u0001\u0000\u0000"+ + "\u0000\u0111\u0114\u0001\u0000\u0000\u0000\u0112\u0110\u0001\u0000\u0000"+ + "\u0000\u0112\u0113\u0001\u0000\u0000\u0000\u0113\u001b\u0001\u0000\u0000"+ + "\u0000\u0114\u0112\u0001\u0000\u0000\u0000\u0115\u011b\u0003\n\u0005\u0000"+ + "\u0116\u0117\u00034\u001a\u0000\u0117\u0118\u0005#\u0000\u0000\u0118\u0119"+ + "\u0003\n\u0005\u0000\u0119\u011b\u0001\u0000\u0000\u0000\u011a\u0115\u0001"+ + "\u0000\u0000\u0000\u011a\u0116\u0001\u0000\u0000\u0000\u011b\u001d\u0001"+ + "\u0000\u0000\u0000\u011c\u011d\u0005\u0006\u0000\u0000\u011d\u0122\u0003"+ + " \u0010\u0000\u011e\u011f\u0005%\u0000\u0000\u011f\u0121\u0003 \u0010"+ + "\u0000\u0120\u011e\u0001\u0000\u0000\u0000\u0121\u0124\u0001\u0000\u0000"+ + "\u0000\u0122\u0120\u0001\u0000\u0000\u0000\u0122\u0123\u0001\u0000\u0000"+ + "\u0000\u0123\u0126\u0001\u0000\u0000\u0000\u0124\u0122\u0001\u0000\u0000"+ + "\u0000\u0125\u0127\u0003&\u0013\u0000\u0126\u0125\u0001\u0000\u0000\u0000"+ + "\u0126\u0127\u0001\u0000\u0000\u0000\u0127\u0129\u0001\u0000\u0000\u0000"+ + "\u0128\u012a\u0003\"\u0011\u0000\u0129\u0128\u0001\u0000\u0000\u0000\u0129"+ + "\u012a\u0001\u0000\u0000\u0000\u012a\u001f\u0001\u0000\u0000\u0000\u012b"+ + "\u012c\u0005\u0018\u0000\u0000\u012c!\u0001\u0000\u0000\u0000\u012d\u012e"+ + "\u0005J\u0000\u0000\u012e\u0133\u0003$\u0012\u0000\u012f\u0130\u0005%"+ + "\u0000\u0000\u0130\u0132\u0003$\u0012\u0000\u0131\u012f\u0001\u0000\u0000"+ + "\u0000\u0132\u0135\u0001\u0000\u0000\u0000\u0133\u0131\u0001\u0000\u0000"+ + "\u0000\u0133\u0134\u0001\u0000\u0000\u0000\u0134#\u0001\u0000\u0000\u0000"+ + "\u0135\u0133\u0001\u0000\u0000\u0000\u0136\u0137\u0003^/\u0000\u0137\u0138"+ + "\u0005#\u0000\u0000\u0138\u0139\u0003^/\u0000\u0139%\u0001\u0000\u0000"+ + "\u0000\u013a\u013d\u0003(\u0014\u0000\u013b\u013d\u0003*\u0015\u0000\u013c"+ + "\u013a\u0001\u0000\u0000\u0000\u013c\u013b\u0001\u0000\u0000\u0000\u013d"+ + 
"\'\u0001\u0000\u0000\u0000\u013e\u013f\u0005K\u0000\u0000\u013f\u0144"+ + "\u0003 \u0010\u0000\u0140\u0141\u0005%\u0000\u0000\u0141\u0143\u0003 "+ + "\u0010\u0000\u0142\u0140\u0001\u0000\u0000\u0000\u0143\u0146\u0001\u0000"+ + "\u0000\u0000\u0144\u0142\u0001\u0000\u0000\u0000\u0144\u0145\u0001\u0000"+ + "\u0000\u0000\u0145)\u0001\u0000\u0000\u0000\u0146\u0144\u0001\u0000\u0000"+ + "\u0000\u0147\u0148\u0005C\u0000\u0000\u0148\u0149\u0003(\u0014\u0000\u0149"+ + "\u014a\u0005D\u0000\u0000\u014a+\u0001\u0000\u0000\u0000\u014b\u014c\u0005"+ + "\f\u0000\u0000\u014c\u0151\u0003 \u0010\u0000\u014d\u014e\u0005%\u0000"+ + "\u0000\u014e\u0150\u0003 \u0010\u0000\u014f\u014d\u0001\u0000\u0000\u0000"+ + "\u0150\u0153\u0001\u0000\u0000\u0000\u0151\u014f\u0001\u0000\u0000\u0000"+ + "\u0151\u0152\u0001\u0000\u0000\u0000\u0152\u0155\u0001\u0000\u0000\u0000"+ + "\u0153\u0151\u0001\u0000\u0000\u0000\u0154\u0156\u0003\u001a\r\u0000\u0155"+ + "\u0154\u0001\u0000\u0000\u0000\u0155\u0156\u0001\u0000\u0000\u0000\u0156"+ + "\u0159\u0001\u0000\u0000\u0000\u0157\u0158\u0005 \u0000\u0000\u0158\u015a"+ + "\u0003\u001a\r\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u0159\u015a\u0001"+ + "\u0000\u0000\u0000\u015a-\u0001\u0000\u0000\u0000\u015b\u015c\u0005\u0004"+ + "\u0000\u0000\u015c\u015d\u0003\u001a\r\u0000\u015d/\u0001\u0000\u0000"+ + "\u0000\u015e\u0160\u0005\u0012\u0000\u0000\u015f\u0161\u0003\u001a\r\u0000"+ + "\u0160\u015f\u0001\u0000\u0000\u0000\u0160\u0161\u0001\u0000\u0000\u0000"+ + "\u0161\u0164\u0001\u0000\u0000\u0000\u0162\u0163\u0005 \u0000\u0000\u0163"+ + "\u0165\u0003\u001a\r\u0000\u0164\u0162\u0001\u0000\u0000\u0000\u0164\u0165"+ + "\u0001\u0000\u0000\u0000\u01651\u0001\u0000\u0000\u0000\u0166\u0167\u0005"+ + "\b\u0000\u0000\u0167\u016a\u0003\u001a\r\u0000\u0168\u0169\u0005 \u0000"+ + "\u0000\u0169\u016b\u0003\u001a\r\u0000\u016a\u0168\u0001\u0000\u0000\u0000"+ + "\u016a\u016b\u0001\u0000\u0000\u0000\u016b3\u0001\u0000\u0000\u0000\u016c"+ + 
"\u0171\u00038\u001c\u0000\u016d\u016e\u0005\'\u0000\u0000\u016e\u0170"+ + "\u00038\u001c\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u0170\u0173\u0001"+ + "\u0000\u0000\u0000\u0171\u016f\u0001\u0000\u0000\u0000\u0171\u0172\u0001"+ + "\u0000\u0000\u0000\u01725\u0001\u0000\u0000\u0000\u0173\u0171\u0001\u0000"+ + "\u0000\u0000\u0174\u0179\u0003:\u001d\u0000\u0175\u0176\u0005\'\u0000"+ + "\u0000\u0176\u0178\u0003:\u001d\u0000\u0177\u0175\u0001\u0000\u0000\u0000"+ + "\u0178\u017b\u0001\u0000\u0000\u0000\u0179\u0177\u0001\u0000\u0000\u0000"+ + "\u0179\u017a\u0001\u0000\u0000\u0000\u017a7\u0001\u0000\u0000\u0000\u017b"+ + "\u0179\u0001\u0000\u0000\u0000\u017c\u017d\u0007\u0002\u0000\u0000\u017d"+ + "9\u0001\u0000\u0000\u0000\u017e\u017f\u0005O\u0000\u0000\u017f;\u0001"+ + "\u0000\u0000\u0000\u0180\u01ab\u00050\u0000\u0000\u0181\u0182\u0003\\"+ + ".\u0000\u0182\u0183\u0005E\u0000\u0000\u0183\u01ab\u0001\u0000\u0000\u0000"+ + "\u0184\u01ab\u0003Z-\u0000\u0185\u01ab\u0003\\.\u0000\u0186\u01ab\u0003"+ + "V+\u0000\u0187\u01ab\u00053\u0000\u0000\u0188\u01ab\u0003^/\u0000\u0189"+ + "\u018a\u0005C\u0000\u0000\u018a\u018f\u0003X,\u0000\u018b\u018c\u0005"+ + "%\u0000\u0000\u018c\u018e\u0003X,\u0000\u018d\u018b\u0001\u0000\u0000"+ + "\u0000\u018e\u0191\u0001\u0000\u0000\u0000\u018f\u018d\u0001\u0000\u0000"+ + "\u0000\u018f\u0190\u0001\u0000\u0000\u0000\u0190\u0192\u0001\u0000\u0000"+ + "\u0000\u0191\u018f\u0001\u0000\u0000\u0000\u0192\u0193\u0005D\u0000\u0000"+ + "\u0193\u01ab\u0001\u0000\u0000\u0000\u0194\u0195\u0005C\u0000\u0000\u0195"+ + "\u019a\u0003V+\u0000\u0196\u0197\u0005%\u0000\u0000\u0197\u0199\u0003"+ + "V+\u0000\u0198\u0196\u0001\u0000\u0000\u0000\u0199\u019c\u0001\u0000\u0000"+ + "\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019a\u019b\u0001\u0000\u0000"+ + "\u0000\u019b\u019d\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000"+ + "\u0000\u019d\u019e\u0005D\u0000\u0000\u019e\u01ab\u0001\u0000\u0000\u0000"+ + 
"\u019f\u01a0\u0005C\u0000\u0000\u01a0\u01a5\u0003^/\u0000\u01a1\u01a2"+ + "\u0005%\u0000\u0000\u01a2\u01a4\u0003^/\u0000\u01a3\u01a1\u0001\u0000"+ + "\u0000\u0000\u01a4\u01a7\u0001\u0000\u0000\u0000\u01a5\u01a3\u0001\u0000"+ + "\u0000\u0000\u01a5\u01a6\u0001\u0000\u0000\u0000\u01a6\u01a8\u0001\u0000"+ + "\u0000\u0000\u01a7\u01a5\u0001\u0000\u0000\u0000\u01a8\u01a9\u0005D\u0000"+ + "\u0000\u01a9\u01ab\u0001\u0000\u0000\u0000\u01aa\u0180\u0001\u0000\u0000"+ + "\u0000\u01aa\u0181\u0001\u0000\u0000\u0000\u01aa\u0184\u0001\u0000\u0000"+ + "\u0000\u01aa\u0185\u0001\u0000\u0000\u0000\u01aa\u0186\u0001\u0000\u0000"+ + "\u0000\u01aa\u0187\u0001\u0000\u0000\u0000\u01aa\u0188\u0001\u0000\u0000"+ + "\u0000\u01aa\u0189\u0001\u0000\u0000\u0000\u01aa\u0194\u0001\u0000\u0000"+ + "\u0000\u01aa\u019f\u0001\u0000\u0000\u0000\u01ab=\u0001\u0000\u0000\u0000"+ + "\u01ac\u01ad\u0005\n\u0000\u0000\u01ad\u01ae\u0005\u001e\u0000\u0000\u01ae"+ + "?\u0001\u0000\u0000\u0000\u01af\u01b0\u0005\u0011\u0000\u0000\u01b0\u01b5"+ + "\u0003B!\u0000\u01b1\u01b2\u0005%\u0000\u0000\u01b2\u01b4\u0003B!\u0000"+ + "\u01b3\u01b1\u0001\u0000\u0000\u0000\u01b4\u01b7\u0001\u0000\u0000\u0000"+ + "\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b5\u01b6\u0001\u0000\u0000\u0000"+ + "\u01b6A\u0001\u0000\u0000\u0000\u01b7\u01b5\u0001\u0000\u0000\u0000\u01b8"+ + "\u01ba\u0003\n\u0005\u0000\u01b9\u01bb\u0007\u0003\u0000\u0000\u01ba\u01b9"+ + "\u0001\u0000\u0000\u0000\u01ba\u01bb\u0001\u0000\u0000\u0000\u01bb\u01be"+ + "\u0001\u0000\u0000\u0000\u01bc\u01bd\u00051\u0000\u0000\u01bd\u01bf\u0007"+ + "\u0004\u0000\u0000\u01be\u01bc\u0001\u0000\u0000\u0000\u01be\u01bf\u0001"+ + "\u0000\u0000\u0000\u01bfC\u0001\u0000\u0000\u0000\u01c0\u01c1\u0005\t"+ + "\u0000\u0000\u01c1\u01c6\u00036\u001b\u0000\u01c2\u01c3\u0005%\u0000\u0000"+ + "\u01c3\u01c5\u00036\u001b\u0000\u01c4\u01c2\u0001\u0000\u0000\u0000\u01c5"+ + "\u01c8\u0001\u0000\u0000\u0000\u01c6\u01c4\u0001\u0000\u0000\u0000\u01c6"+ + 
"\u01c7\u0001\u0000\u0000\u0000\u01c7E\u0001\u0000\u0000\u0000\u01c8\u01c6"+ + "\u0001\u0000\u0000\u0000\u01c9\u01ca\u0005\u0002\u0000\u0000\u01ca\u01cf"+ + "\u00036\u001b\u0000\u01cb\u01cc\u0005%\u0000\u0000\u01cc\u01ce\u00036"+ + "\u001b\u0000\u01cd\u01cb\u0001\u0000\u0000\u0000\u01ce\u01d1\u0001\u0000"+ + "\u0000\u0000\u01cf\u01cd\u0001\u0000\u0000\u0000\u01cf\u01d0\u0001\u0000"+ + "\u0000\u0000\u01d0G\u0001\u0000\u0000\u0000\u01d1\u01cf\u0001\u0000\u0000"+ + "\u0000\u01d2\u01d3\u0005\u000e\u0000\u0000\u01d3\u01d8\u0003J%\u0000\u01d4"+ + "\u01d5\u0005%\u0000\u0000\u01d5\u01d7\u0003J%\u0000\u01d6\u01d4\u0001"+ + "\u0000\u0000\u0000\u01d7\u01da\u0001\u0000\u0000\u0000\u01d8\u01d6\u0001"+ + "\u0000\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000\u01d9I\u0001\u0000"+ + "\u0000\u0000\u01da\u01d8\u0001\u0000\u0000\u0000\u01db\u01dc\u00036\u001b"+ + "\u0000\u01dc\u01dd\u0005S\u0000\u0000\u01dd\u01de\u00036\u001b\u0000\u01de"+ + "K\u0001\u0000\u0000\u0000\u01df\u01e0\u0005\u0001\u0000\u0000\u01e0\u01e1"+ + "\u0003\u0012\t\u0000\u01e1\u01e3\u0003^/\u0000\u01e2\u01e4\u0003R)\u0000"+ + "\u01e3\u01e2\u0001\u0000\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000\u0000"+ + "\u01e4M\u0001\u0000\u0000\u0000\u01e5\u01e6\u0005\u0007\u0000\u0000\u01e6"+ + "\u01e7\u0003\u0012\t\u0000\u01e7\u01e8\u0003^/\u0000\u01e8O\u0001\u0000"+ + "\u0000\u0000\u01e9\u01ea\u0005\r\u0000\u0000\u01ea\u01eb\u00034\u001a"+ + "\u0000\u01ebQ\u0001\u0000\u0000\u0000\u01ec\u01f1\u0003T*\u0000\u01ed"+ + "\u01ee\u0005%\u0000\u0000\u01ee\u01f0\u0003T*\u0000\u01ef\u01ed\u0001"+ + "\u0000\u0000\u0000\u01f0\u01f3\u0001\u0000\u0000\u0000\u01f1\u01ef\u0001"+ + "\u0000\u0000\u0000\u01f1\u01f2\u0001\u0000\u0000\u0000\u01f2S\u0001\u0000"+ + "\u0000\u0000\u01f3\u01f1\u0001\u0000\u0000\u0000\u01f4\u01f5\u00038\u001c"+ + "\u0000\u01f5\u01f6\u0005#\u0000\u0000\u01f6\u01f7\u0003<\u001e\u0000\u01f7"+ + "U\u0001\u0000\u0000\u0000\u01f8\u01f9\u0007\u0005\u0000\u0000\u01f9W\u0001"+ + 
"\u0000\u0000\u0000\u01fa\u01fd\u0003Z-\u0000\u01fb\u01fd\u0003\\.\u0000"+ + "\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fc\u01fb\u0001\u0000\u0000\u0000"+ + "\u01fdY\u0001\u0000\u0000\u0000\u01fe\u0200\u0007\u0000\u0000\u0000\u01ff"+ + "\u01fe\u0001\u0000\u0000\u0000\u01ff\u0200\u0001\u0000\u0000\u0000\u0200"+ + "\u0201\u0001\u0000\u0000\u0000\u0201\u0202\u0005\u001f\u0000\u0000\u0202"+ + "[\u0001\u0000\u0000\u0000\u0203\u0205\u0007\u0000\u0000\u0000\u0204\u0203"+ + "\u0001\u0000\u0000\u0000\u0204\u0205\u0001\u0000\u0000\u0000\u0205\u0206"+ + "\u0001\u0000\u0000\u0000\u0206\u0207\u0005\u001e\u0000\u0000\u0207]\u0001"+ + "\u0000\u0000\u0000\u0208\u0209\u0005\u001d\u0000\u0000\u0209_\u0001\u0000"+ + "\u0000\u0000\u020a\u020b\u0007\u0006\u0000\u0000\u020ba\u0001\u0000\u0000"+ + "\u0000\u020c\u020d\u0005\u0005\u0000\u0000\u020d\u020e\u0003d2\u0000\u020e"+ + "c\u0001\u0000\u0000\u0000\u020f\u0210\u0005C\u0000\u0000\u0210\u0211\u0003"+ + "\u0002\u0001\u0000\u0211\u0212\u0005D\u0000\u0000\u0212e\u0001\u0000\u0000"+ + "\u0000\u0213\u0214\u0005\u0010\u0000\u0000\u0214\u0215\u0005c\u0000\u0000"+ + "\u0215g\u0001\u0000\u0000\u0000\u0216\u0217\u0005\u000b\u0000\u0000\u0217"+ + "\u0218\u0005g\u0000\u0000\u0218i\u0001\u0000\u0000\u0000\u0219\u021a\u0005"+ + "\u0003\u0000\u0000\u021a\u021d\u0005Y\u0000\u0000\u021b\u021c\u0005W\u0000"+ + "\u0000\u021c\u021e\u00036\u001b\u0000\u021d\u021b\u0001\u0000\u0000\u0000"+ + "\u021d\u021e\u0001\u0000\u0000\u0000\u021e\u0228\u0001\u0000\u0000\u0000"+ + "\u021f\u0220\u0005X\u0000\u0000\u0220\u0225\u0003l6\u0000\u0221\u0222"+ + "\u0005%\u0000\u0000\u0222\u0224\u0003l6\u0000\u0223\u0221\u0001\u0000"+ + "\u0000\u0000\u0224\u0227\u0001\u0000\u0000\u0000\u0225\u0223\u0001\u0000"+ + "\u0000\u0000\u0225\u0226\u0001\u0000\u0000\u0000\u0226\u0229\u0001\u0000"+ + "\u0000\u0000\u0227\u0225\u0001\u0000\u0000\u0000\u0228\u021f\u0001\u0000"+ + "\u0000\u0000\u0228\u0229\u0001\u0000\u0000\u0000\u0229k\u0001\u0000\u0000"+ + 
"\u0000\u022a\u022b\u00036\u001b\u0000\u022b\u022c\u0005#\u0000\u0000\u022c"+ + "\u022e\u0001\u0000\u0000\u0000\u022d\u022a\u0001\u0000\u0000\u0000\u022d"+ + "\u022e\u0001\u0000\u0000\u0000\u022e\u022f\u0001\u0000\u0000\u0000\u022f"+ + "\u0230\u00036\u001b\u0000\u0230m\u0001\u0000\u0000\u00007y\u0082\u0091"+ + "\u009d\u00a6\u00ae\u00b2\u00ba\u00bc\u00c1\u00c8\u00cd\u00d4\u00da\u00e2"+ + "\u00e4\u00ef\u00f6\u0101\u0104\u0112\u011a\u0122\u0126\u0129\u0133\u013c"+ + "\u0144\u0151\u0155\u0159\u0160\u0164\u016a\u0171\u0179\u018f\u019a\u01a5"+ + "\u01aa\u01b5\u01ba\u01be\u01c6\u01cf\u01d8\u01e3\u01f1\u01fc\u01ff\u0204"+ + "\u021d\u0225\u0228\u022d"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 5122eb07371b..92c9793fd8d9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -365,13 +365,13 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { * *

The default implementation does nothing.

*/ - @Override public void enterFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { } + @Override public void enterIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx) { } /** * {@inheritDoc} * *

The default implementation does nothing.

*/ - @Override public void exitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { } + @Override public void exitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx) { } /** * {@inheritDoc} * @@ -432,6 +432,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

The default implementation does nothing.

*/ @Override public void exitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx) { } + /** + * {@inheritDoc} + * + *

The default implementation does nothing.

+ */ + @Override public void enterMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) { } + /** + * {@inheritDoc} + * + *

The default implementation does nothing.

+ */ + @Override public void exitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index a32ac9bd9100..25eb59648fe6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -221,7 +221,7 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im *

The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.

*/ - @Override public T visitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { return visitChildren(ctx); } + @Override public T visitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -257,6 +257,13 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.

*/ @Override public T visitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

+ */ + @Override public T visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index 6e8000f7fcf8..ac4047ffbd22 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -336,15 +336,15 @@ public interface EsqlBaseParserListener extends ParseTreeListener { */ void exitFromCommand(EsqlBaseParser.FromCommandContext ctx); /** - * Enter a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. + * Enter a parse tree produced by {@link EsqlBaseParser#indexIdentifier}. * @param ctx the parse tree */ - void enterFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); + void enterIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx); /** - * Exit a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. + * Exit a parse tree produced by {@link EsqlBaseParser#indexIdentifier}. * @param ctx the parse tree */ - void exitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); + void exitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#fromOptions}. * @param ctx the parse tree @@ -395,6 +395,16 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#metricsCommand}. + * @param ctx the parse tree + */ + void enterMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#metricsCommand}. 
+ * @param ctx the parse tree + */ + void exitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#evalCommand}. * @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index d6e83b37a0f3..37b94cd585c1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -204,11 +204,11 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { */ T visitFromCommand(EsqlBaseParser.FromCommandContext ctx); /** - * Visit a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. + * Visit a parse tree produced by {@link EsqlBaseParser#indexIdentifier}. * @param ctx the parse tree * @return the visitor result */ - T visitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); + T visitIndexIdentifier(EsqlBaseParser.IndexIdentifierContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#fromOptions}. * @param ctx the parse tree @@ -239,6 +239,12 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitDeprecated_metadata(EsqlBaseParser.Deprecated_metadataContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#metricsCommand}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#evalCommand}. 
* @param ctx the parse tree diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java index 7f0b5c73b9fb..b5e348589fa7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java @@ -9,8 +9,8 @@ import org.antlr.v4.runtime.tree.TerminalNode; import org.elasticsearch.common.Strings; -import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.FromIdentifierContext; import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.IdentifierContext; +import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.IndexIdentifierContext; import java.util.List; @@ -24,8 +24,8 @@ public String visitIdentifier(IdentifierContext ctx) { } @Override - public String visitFromIdentifier(FromIdentifierContext ctx) { - return ctx == null ? null : unquoteIdentifier(null, ctx.FROM_UNQUOTED_IDENTIFIER()); + public String visitIndexIdentifier(IndexIdentifierContext ctx) { + return ctx == null ? 
null : unquoteIdentifier(null, ctx.INDEX_UNQUOTED_IDENTIFIER()); } protected static String unquoteIdentifier(TerminalNode quotedNode, TerminalNode unquotedNode) { @@ -42,7 +42,7 @@ protected static String unquoteIdString(String quotedString) { return quotedString.substring(1, quotedString.length() - 1).replace("``", "`"); } - public String visitFromIdentifiers(List ctx) { + public String visitIndexIdentifiers(List ctx) { return Strings.collectionToDelimitedString(visitList(this, ctx, String.class), ","); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index aea835c11ad3..b8fc29e4ef64 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -10,6 +10,7 @@ import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.tree.ParseTree; +import org.elasticsearch.Build; import org.elasticsearch.core.Tuple; import org.elasticsearch.dissect.DissectException; import org.elasticsearch.dissect.DissectParser; @@ -205,7 +206,7 @@ public LogicalPlan visitRowCommand(EsqlBaseParser.RowCommandContext ctx) { @Override public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { Source source = source(ctx); - TableIdentifier table = new TableIdentifier(source, null, visitFromIdentifiers(ctx.fromIdentifier())); + TableIdentifier table = new TableIdentifier(source, null, visitIndexIdentifiers(ctx.indexIdentifier())); Map metadataMap = new LinkedHashMap<>(); if (ctx.metadata() != null) { var deprecatedContext = ctx.metadata().deprecated_metadata(); @@ -222,8 +223,8 @@ public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { metadataOptionContext = ctx.metadata().metadataOption(); } - for (var c : 
metadataOptionContext.fromIdentifier()) { - String id = visitFromIdentifier(c); + for (var c : metadataOptionContext.indexIdentifier()) { + String id = visitIndexIdentifier(c); Source src = source(c); if (MetadataAttribute.isSupported(id) == false) { throw new ParsingException(src, "unsupported metadata field [" + id + "]"); @@ -253,10 +254,19 @@ public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { @Override public PlanFactory visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { - List aggregates = new ArrayList<>(visitFields(ctx.stats)); - List groupings = visitGrouping(ctx.grouping); + final Stats stats = stats(source(ctx), ctx.grouping, ctx.stats); + return input -> new EsqlAggregate(source(ctx), input, stats.groupings, stats.aggregates); + } + + private record Stats(List groupings, List aggregates) { + + } + + private Stats stats(Source source, EsqlBaseParser.FieldsContext groupingsCtx, EsqlBaseParser.FieldsContext aggregatesCtx) { + List groupings = visitGrouping(groupingsCtx); + List aggregates = new ArrayList<>(visitFields(aggregatesCtx)); if (aggregates.isEmpty() && groupings.isEmpty()) { - throw new ParsingException(source(ctx), "At least one aggregation or grouping expression required in [{}]", ctx.getText()); + throw new ParsingException(source, "At least one aggregation or grouping expression required in [{}]", source.text()); } // grouping keys are automatically added as aggregations however the user is not allowed to specify them if (groupings.isEmpty() == false && aggregates.isEmpty() == false) { @@ -279,8 +289,7 @@ public PlanFactory visitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { for (Expression group : groupings) { aggregates.add(Expressions.attribute(group)); } - - return input -> new EsqlAggregate(source(ctx), input, new ArrayList<>(groupings), aggregates); + return new Stats(new ArrayList<>(groupings), aggregates); } private void fail(Expression exp, String message, Object... 
args) { @@ -427,5 +436,20 @@ private static Tuple parsePolicyName(Token policyToken) { return new Tuple<>(mode, policyName); } + @Override + public LogicalPlan visitMetricsCommand(EsqlBaseParser.MetricsCommandContext ctx) { + if (Build.current().isSnapshot() == false) { + throw new IllegalArgumentException("METRICS command currently requires a snapshot build"); + } + Source source = source(ctx); + TableIdentifier table = new TableIdentifier(source, null, visitIndexIdentifiers(ctx.indexIdentifier())); + var unresolvedRelation = new EsqlUnresolvedRelation(source, table, List.of()); + if (ctx.aggregates == null && ctx.grouping == null) { + return unresolvedRelation; + } + final Stats stats = stats(source, ctx.grouping, ctx.aggregates); + return new EsqlAggregate(source, unresolvedRelation, stats.groupings, stats.aggregates); + } + interface PlanFactory extends Function {} } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 1a36616cb647..ddd53cad8ec6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.parser; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Build; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.Randomness; import org.elasticsearch.core.Tuple; @@ -44,6 +45,7 @@ import org.elasticsearch.xpack.ql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.ql.expression.predicate.logical.Not; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.ql.plan.TableIdentifier; import org.elasticsearch.xpack.ql.plan.logical.Filter; import 
org.elasticsearch.xpack.ql.plan.logical.Limit; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; @@ -1049,6 +1051,147 @@ public void testInlineConvertUnsupportedType() { expectError("ROW 3::BYTE", "line 1:6: Unsupported conversion to type [BYTE]"); } + public void testMetricsWithoutStats() { + assumeTrue("requires snapshot build", Build.current().isSnapshot()); + + assertStatement("METRICS foo", new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo"), List.of())); + assertStatement("METRICS foo,bar", new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo,bar"), List.of())); + assertStatement("METRICS foo*,bar", new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*,bar"), List.of())); + assertStatement("METRICS foo-*,bar", new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo-*,bar"), List.of())); + assertStatement( + "METRICS foo-*,bar+*", + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo-*,bar+*"), List.of()) + ); + } + + public void testMetricsIdentifiers() { + assumeTrue("requires snapshot build", Build.current().isSnapshot()); + Map patterns = Map.of( + "metrics foo,test-*", + "foo,test-*", + "metrics 123-test@foo_bar+baz1", + "123-test@foo_bar+baz1", + "metrics foo, test,xyz", + "foo,test,xyz", + "metrics >", + ">" + ); + for (Map.Entry e : patterns.entrySet()) { + assertStatement(e.getKey(), new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, e.getValue()), List.of())); + } + } + + public void testSimpleMetricsWithStats() { + assumeTrue("requires snapshot build", Build.current().isSnapshot()); + assertStatement( + "METRICS foo load=avg(cpu) BY ts", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo"), List.of()), + List.of(attribute("ts")), + List.of(new Alias(EMPTY, "load", new UnresolvedFunction(EMPTY, "avg", DEFAULT, List.of(attribute("cpu")))), attribute("ts")) + ) + ); + 
assertStatement( + "METRICS foo,bar load=avg(cpu) BY ts", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo,bar"), List.of()), + List.of(attribute("ts")), + List.of(new Alias(EMPTY, "load", new UnresolvedFunction(EMPTY, "avg", DEFAULT, List.of(attribute("cpu")))), attribute("ts")) + ) + ); + assertStatement( + "METRICS foo,bar load=avg(cpu),max(rate(requests)) BY ts", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo,bar"), List.of()), + List.of(attribute("ts")), + List.of( + new Alias(EMPTY, "load", new UnresolvedFunction(EMPTY, "avg", DEFAULT, List.of(attribute("cpu")))), + new Alias( + EMPTY, + "max(rate(requests))", + new UnresolvedFunction( + EMPTY, + "max", + DEFAULT, + List.of(new UnresolvedFunction(EMPTY, "rate", DEFAULT, List.of(attribute("requests")))) + ) + ), + attribute("ts") + ) + ) + ); + assertStatement( + "METRICS foo* count(errors)", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*"), List.of()), + List.of(), + List.of(new Alias(EMPTY, "count(errors)", new UnresolvedFunction(EMPTY, "count", DEFAULT, List.of(attribute("errors"))))) + ) + ); + assertStatement( + "METRICS foo* a(b)", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*"), List.of()), + List.of(), + List.of(new Alias(EMPTY, "a(b)", new UnresolvedFunction(EMPTY, "a", DEFAULT, List.of(attribute("b"))))) + ) + ); + assertStatement( + "METRICS foo* a(b)", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*"), List.of()), + List.of(), + List.of(new Alias(EMPTY, "a(b)", new UnresolvedFunction(EMPTY, "a", DEFAULT, List.of(attribute("b"))))) + ) + ); + assertStatement( + "METRICS foo* a1(b2)", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*"), List.of()), + List.of(), + 
List.of(new Alias(EMPTY, "a1(b2)", new UnresolvedFunction(EMPTY, "a1", DEFAULT, List.of(attribute("b2"))))) + ) + ); + assertStatement( + "METRICS foo*,bar* b = min(a) by c, d.e", + new EsqlAggregate( + EMPTY, + new EsqlUnresolvedRelation(EMPTY, new TableIdentifier(EMPTY, null, "foo*,bar*"), List.of()), + List.of(attribute("c"), attribute("d.e")), + List.of( + new Alias(EMPTY, "b", new UnresolvedFunction(EMPTY, "min", DEFAULT, List.of(attribute("a")))), + attribute("c"), + attribute("d.e") + ) + ) + ); + } + + public void testMetricWithGroupKeyAsAgg() { + assumeTrue("requires snapshot build", Build.current().isSnapshot()); + var queries = List.of("METRICS foo a BY a"); + for (String query : queries) { + expectVerificationError(query, "grouping key [a] already specified in the STATS BY clause"); + } + } + + private void assertStatement(String statement, LogicalPlan expected) { + final LogicalPlan actual; + try { + actual = statement(statement); + } catch (Exception e) { + throw new AssertionError("parsing error for [" + statement + "]", e); + } + assertThat(statement, actual, equalTo(expected)); + } + private LogicalPlan statement(String e) { return statement(e, List.of()); } From 4d04bf4053ec28f2ecab3ab352f0047f3c2222e8 Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Sat, 11 May 2024 16:49:23 +0100 Subject: [PATCH 068/119] Fix integer overflow in native scalar quantizer (#108493) Offsets in memory segments should be computed as longs to avoid integer overflow on large segments. 
--------- Co-authored-by: ChrisHegarty Co-authored-by: Elastic Machine --- ...stractInt7ScalarQuantizedVectorScorer.java | 2 +- .../vec/internal/Int7DotProduct.java | 4 +- .../vec/internal/Int7Euclidean.java | 4 +- .../vec/internal/Int7MaximumInnerProduct.java | 4 +- .../vec/VectorScorerFactoryTests.java | 65 +++++++++++++++++++ 5 files changed, 72 insertions(+), 7 deletions(-) diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java index 2e60079da864..2be0aa53f7c5 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/AbstractInt7ScalarQuantizedVectorScorer.java @@ -78,7 +78,7 @@ protected final void checkOrdinal(int ord) { } } - protected final float fallbackScore(int firstByteOffset, int secondByteOffset) throws IOException { + protected final float fallbackScore(long firstByteOffset, long secondByteOffset) throws IOException { input.seek(firstByteOffset); byte[] a = new byte[dims]; input.readBytes(a, 0, a.length); diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java index f92bf0b52ed0..9b452219bd63 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java @@ -34,8 +34,8 @@ public float score(int firstOrd, int secondOrd) throws IOException { checkOrdinal(secondOrd); final int length = dims; - int firstByteOffset = firstOrd * (length + Float.BYTES); - int secondByteOffset = secondOrd * (length + Float.BYTES); + long firstByteOffset = (long) firstOrd * (length + Float.BYTES); + long secondByteOffset = (long) secondOrd * (length + Float.BYTES); MemorySegment 
firstSeg = segmentSlice(firstByteOffset, length); input.seek(firstByteOffset + length); diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java index e1f16c6909cf..55b08a899bd7 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7Euclidean.java @@ -34,8 +34,8 @@ public float score(int firstOrd, int secondOrd) throws IOException { checkOrdinal(secondOrd); final int length = dims; - int firstByteOffset = firstOrd * (length + Float.BYTES); - int secondByteOffset = secondOrd * (length + Float.BYTES); + long firstByteOffset = (long) firstOrd * (length + Float.BYTES); + long secondByteOffset = (long) secondOrd * (length + Float.BYTES); MemorySegment firstSeg = segmentSlice(firstByteOffset, length); MemorySegment secondSeg = segmentSlice(secondByteOffset, length); diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java index bd6fc921f183..5cdfc62bc907 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7MaximumInnerProduct.java @@ -34,8 +34,8 @@ public float score(int firstOrd, int secondOrd) throws IOException { checkOrdinal(secondOrd); final int length = dims; - int firstByteOffset = firstOrd * (length + Float.BYTES); - int secondByteOffset = secondOrd * (length + Float.BYTES); + long firstByteOffset = (long) firstOrd * (length + Float.BYTES); + long secondByteOffset = (long) secondOrd * (length + Float.BYTES); MemorySegment firstSeg = segmentSlice(firstByteOffset, length); input.seek(firstByteOffset + length); diff --git a/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java 
b/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java index 115cf8e8cf9f..246ddaeb2ebc 100644 --- a/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java +++ b/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java @@ -8,6 +8,8 @@ package org.elasticsearch.vec; +import com.carrotsearch.randomizedtesting.generators.RandomNumbers; + import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; @@ -17,6 +19,8 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; +import java.util.Objects; +import java.util.Random; import java.util.function.Function; import static org.elasticsearch.vec.VectorSimilarityType.COSINE; @@ -226,6 +230,67 @@ void testRandomSliceImpl(int dims, long maxChunkSize, int initialPadding, Functi } } + // Tests with a large amount of data (> 2GB), which ensures that data offsets do not overflow + @Nightly + public void testLarge() throws IOException { + var factory = AbstractVectorTestCase.factory.get(); + + try (Directory dir = new MMapDirectory(createTempDir(getTestName()))) { + final int dims = 8192; + final int size = 262144; + final float correction = randomFloat(); + + String fileName = getTestName() + "-" + dims; + logger.info("Testing " + fileName); + try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) { + for (int i = 0; i < size; i++) { + var vec = vector(i, dims); + var off = (float) i; + out.writeBytes(vec, 0, vec.length); + out.writeInt(Float.floatToIntBits(off)); + } + } + try (IndexInput in = dir.openInput(fileName, IOContext.DEFAULT)) { + for (int times = 0; times < TIMES; times++) { + int idx0 = randomIntBetween(0, size - 1); + int idx1 = size - 1; + float off0 = (float) idx0; + float off1 = (float) idx1; + // dot product + float expected = luceneScore(DOT_PRODUCT, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); + var scorer = 
factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, DOT_PRODUCT, in).get(); + assertThat(scorer.score(idx0, idx1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + // max inner product + expected = luceneScore(MAXIMUM_INNER_PRODUCT, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); + scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, MAXIMUM_INNER_PRODUCT, in).get(); + assertThat(scorer.score(idx0, idx1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + // cosine + expected = luceneScore(COSINE, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); + scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, COSINE, in).get(); + assertThat(scorer.score(idx0, idx1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + // euclidean + expected = luceneScore(EUCLIDEAN, vector(idx0, dims), vector(idx1, dims), correction, off0, off1); + scorer = factory.getInt7ScalarQuantizedVectorScorer(dims, size, correction, EUCLIDEAN, in).get(); + assertThat(scorer.score(idx0, idx1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(idx0).score(idx1), equalTo(expected)); + } + } + } + } + + // creates the vector based on the given ordinal, which is reproducible given the ord and dims + static byte[] vector(int ord, int dims) { + var random = new Random(Objects.hash(ord, dims)); + byte[] ba = new byte[dims]; + for (int i = 0; i < dims; i++) { + ba[i] = (byte) RandomNumbers.randomIntBetween(random, MIN_INT7_VALUE, MAX_INT7_VALUE); + } + return ba; + } + static Function BYTE_ARRAY_RANDOM_INT7_FUNC = size -> { byte[] ba = new byte[size]; randomBytesBetween(ba, MIN_INT7_VALUE, MAX_INT7_VALUE); From 0b6cea8c85eff73be4dc056ce997aa93e16f8401 Mon Sep 17 00:00:00 
2001 From: Armin Braun Date: Sun, 12 May 2024 12:32:11 +0200 Subject: [PATCH 069/119] Remove redundant results field in RankFeaturePhase (#108387) The `rankPhaseResults` are never used, just redundantly created and released => remove them. --- .../java/org/elasticsearch/action/search/RankFeaturePhase.java | 3 --- 1 file changed, 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java b/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java index a18d2c641854..767597625edc 100644 --- a/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/RankFeaturePhase.java @@ -22,7 +22,6 @@ public final class RankFeaturePhase extends SearchPhase { private final SearchPhaseContext context; private final SearchPhaseResults queryPhaseResults; - private final SearchPhaseResults rankPhaseResults; private final AggregatedDfs aggregatedDfs; @@ -39,8 +38,6 @@ public final class RankFeaturePhase extends SearchPhase { this.context = context; this.queryPhaseResults = queryPhaseResults; this.aggregatedDfs = aggregatedDfs; - this.rankPhaseResults = new ArraySearchPhaseResults<>(context.getNumShards()); - context.addReleasable(rankPhaseResults); } @Override From 1dd9f5e8f452917032ba747efc14d1ce4654bb61 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 13 May 2024 08:44:20 +0100 Subject: [PATCH 070/119] Fix serialization of `JoinStatus#age` (#108542) This particular field is being sent in a strange way today. This commit fixes the wire protocol to use the standard `TimeValue` serialization. 
--- .../org/elasticsearch/TransportVersions.java | 1 + .../cluster/coordination/JoinStatus.java | 18 +++++++++++++++--- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index db43a12cf901..9589885d6df3 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -196,6 +196,7 @@ static TransportVersion def(int id) { public static final TransportVersion ROLLUP_USAGE = def(8_653_00_0); public static final TransportVersion SECURITY_ROLE_DESCRIPTION = def(8_654_00_0); public static final TransportVersion ML_INFERENCE_AZURE_OPENAI_COMPLETIONS = def(8_655_00_0); + public static final TransportVersion JOIN_STATUS_AGE_SERIALIZATION = def(8_656_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java index 19113bc77000..6e0e7d8dda5a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinStatus.java @@ -8,6 +8,7 @@ package org.elasticsearch.cluster.coordination; +import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -19,7 +20,14 @@ public record JoinStatus(DiscoveryNode remoteNode, long term, String message, TimeValue age) implements Writeable { public JoinStatus(StreamInput in) throws IOException { - this(new DiscoveryNode(in), in.readLong(), in.readString(), new TimeValue(in.readLong(), TimeUnit.valueOf(in.readString()))); + this( + new DiscoveryNode(in), + in.readLong(), + in.readString(), + 
in.getTransportVersion().onOrAfter(TransportVersions.JOIN_STATUS_AGE_SERIALIZATION) + ? in.readTimeValue() + : new TimeValue(in.readLong(), TimeUnit.valueOf(in.readString())) + ); } @Override @@ -27,7 +35,11 @@ public void writeTo(StreamOutput out) throws IOException { remoteNode.writeTo(out); out.writeLong(term); out.writeString(message); - out.writeLong(age.duration()); - out.writeString(age.timeUnit().name()); + if (out.getTransportVersion().onOrAfter(TransportVersions.JOIN_STATUS_AGE_SERIALIZATION)) { + out.writeTimeValue(age); + } else { + out.writeLong(age.duration()); + out.writeString(age.timeUnit().name()); + } } } From 5832b12c6745bcd7bc8c87811b6842fcf4eb3883 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Mon, 13 May 2024 09:45:08 +0200 Subject: [PATCH 071/119] Fix TranslogTests testDisabledFsync (#108407) The test used to reuse a translogDir directory that has nother translog created in it. I believe the other one locked files and prevented them from being deleted. --- .../java/org/elasticsearch/index/translog/TranslogTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 72abe322c702..efa46443e2da 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -4000,8 +4000,8 @@ static boolean hasCircularReference(Exception cause) { return false; } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108321") public void testDisabledFsync() throws IOException { + var translogDir = createTempDir(); var config = new TranslogConfig( shardId, translogDir, From 3a93161dc99fbebfe41870b7da964bae2702e964 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 13 May 2024 10:26:31 +0200 Subject: [PATCH 072/119] Fix leaking ArraySearchPhaseResults during node 
shutdown (#108539) We should not quietly ignore exceptions during node shutdown. Lets do what we do on the transport side and just execute on the current thread if forking during shutdown is not possible anymore. Closes #104361 Closes #103108 Closes #104081 --------- Co-authored-by: David Turner --- .../action/search/AbstractSearchAsyncAction.java | 13 ++++++++++++- .../ml/integration/MlDistributedFailureIT.java | 1 - 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 2f307d653f8a..0db9f3d20d11 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.shard.ShardId; @@ -374,7 +375,17 @@ protected abstract void executePhaseOnShard( protected void fork(final Runnable runnable) { executor.execute(new AbstractRunnable() { @Override - public void onFailure(Exception e) {} + public void onFailure(Exception e) { + logger.error(() -> "unexpected error during [" + task + "]", e); + assert false : e; + } + + @Override + public void onRejection(Exception e) { + // avoid leaks during node shutdown by executing on the current thread if the executor shuts down + assert e instanceof EsRejectedExecutionException esre && esre.isExecutorShutdown() : e; + doRun(); + } @Override protected void doRun() { diff --git 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index a2b00974d403..33fd7c108863 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -144,7 +144,6 @@ public void testLoseDedicatedMasterNode() throws Exception { }); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104081") public void testFullClusterRestart() throws Exception { internalCluster().ensureAtLeastNumDataNodes(3); ensureStableCluster(); From e7aacd497e82426643e2c85f07a28438d3a7b144 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 13 May 2024 09:41:47 +0100 Subject: [PATCH 073/119] AwaitsFix for #108288 --- .../org/elasticsearch/indices/recovery/DanglingIndicesIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/DanglingIndicesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/DanglingIndicesIT.java index c47ada432f4b..0b9ca9d9f958 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/DanglingIndicesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/DanglingIndicesIT.java @@ -175,6 +175,7 @@ public void testMustAcceptDataLossToImportDanglingIndex() throws Exception { * other will be considered dangling, and can therefore be listed and * deleted through the API */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108288") public void testDanglingIndexCanBeDeleted() throws Exception { final Settings settings = buildSettings(1, true); internalCluster().startNodes(3, settings); From 5ff94f8cd8029eb08f33a92970c95a1d4d0c0a54 
Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Mon, 13 May 2024 10:55:39 +0200 Subject: [PATCH 074/119] [DOCS] Update run-elasticsearch-locally.asciidoc (#108545) Remove confusing link as already available in note at end of page --- docs/reference/quickstart/run-elasticsearch-locally.asciidoc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/reference/quickstart/run-elasticsearch-locally.asciidoc b/docs/reference/quickstart/run-elasticsearch-locally.asciidoc index cfad434b890d..0db395ba34b0 100644 --- a/docs/reference/quickstart/run-elasticsearch-locally.asciidoc +++ b/docs/reference/quickstart/run-elasticsearch-locally.asciidoc @@ -10,8 +10,6 @@ The instructions on this page are for *local development only*. Do not use these instructions for production deployments, because they are not secure. While this approach is convenient for experimenting and learning, you should never run the service in this way in a production environment. - -Refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Install {es}] to learn about the various options for installing {es} in a production environment, including using Docker. ==== The following commands help you very quickly spin up a single-node {es} cluster, together with {kib} in Docker. @@ -174,4 +172,4 @@ Use our <> to learn the basics of {es}: how t This setup is not suitable for production use. For production deployments, we recommend using our managed service on Elastic Cloud. https://cloud.elastic.co/registration[Sign up for a free trial] (no credit card required). -Otherwise, refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Install {es}] to learn about the various options for installing {es} in a self-managed production environment, including using Docker. 
\ No newline at end of file +Otherwise, refer to https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html[Install {es}] to learn about the various options for installing {es} in a self-managed production environment, including using Docker. From 605b61847cf8b68aac2b6a9fc132a4f0bc84fdcb Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Mon, 13 May 2024 11:03:48 +0200 Subject: [PATCH 075/119] semantic_text - document ingestion inference (#108102) Co-authored-by: @jimczi --- .../action/bulk/BulkOperation.java | 4 + .../action/bulk/BulkShardRequest.java | 32 + x-pack/plugin/inference/build.gradle | 2 +- .../MockDenseInferenceServiceIT.java | 11 +- .../MockSparseInferenceServiceIT.java | 11 +- .../mock/AbstractTestInferenceService.java | 13 +- .../TestDenseInferenceServiceExtension.java | 8 +- .../TestSparseInferenceServiceExtension.java | 13 +- .../xpack/inference/InferencePlugin.java | 16 + .../ShardBulkInferenceActionFilter.java | 536 ++++++++++++ .../ShardBulkInferenceActionFilterTests.java | 386 +++++++++ .../xpack/inference/InferenceRestIT.java | 3 +- .../inference/30_semantic_text_inference.yml | 773 ++++++++++++++++++ .../CoordinatedInferenceIngestIT.java | 4 +- 14 files changed, 1788 insertions(+), 24 deletions(-) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java create mode 100644 x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index fcad07d0696f..7356dc0ea140 100644 --- 
a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -298,6 +298,10 @@ private void executeBulkRequestsByShard( bulkRequest.getRefreshPolicy(), requests.toArray(new BulkItemRequest[0]) ); + var indexMetadata = clusterState.getMetadata().index(shardId.getIndexName()); + if (indexMetadata != null && indexMetadata.getInferenceFields().isEmpty() == false) { + bulkShardRequest.setInferenceFieldMap(indexMetadata.getInferenceFields()); + } bulkShardRequest.waitForActiveShards(bulkRequest.waitForActiveShards()); bulkShardRequest.timeout(bulkRequest.timeout()); bulkShardRequest.routedBasedOnClusterVersion(clusterState.version()); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java index bd929b9a2204..85b7fc03ff66 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkShardRequest.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.support.replication.ReplicatedWriteRequest; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.set.Sets; @@ -22,6 +23,7 @@ import org.elasticsearch.transport.RawIndexingDataTransportRequest; import java.io.IOException; +import java.util.Map; import java.util.Set; public final class BulkShardRequest extends ReplicatedWriteRequest @@ -33,6 +35,8 @@ public final class BulkShardRequest extends ReplicatedWriteRequest inferenceFieldMap = null; + public BulkShardRequest(StreamInput in) throws IOException { super(in); items = in.readArray(i -> i.readOptionalWriteable(inpt -> 
new BulkItemRequest(shardId, inpt)), BulkItemRequest[]::new); @@ -44,6 +48,30 @@ public BulkShardRequest(ShardId shardId, RefreshPolicy refreshPolicy, BulkItemRe setRefreshPolicy(refreshPolicy); } + /** + * Public for test + * Set the transient metadata indicating that this request requires running inference before proceeding. + */ + public void setInferenceFieldMap(Map fieldInferenceMap) { + this.inferenceFieldMap = fieldInferenceMap; + } + + /** + * Consumes the inference metadata to execute inference on the bulk items just once. + */ + public Map consumeInferenceFieldMap() { + Map ret = inferenceFieldMap; + inferenceFieldMap = null; + return ret; + } + + /** + * Public for test + */ + public Map getInferenceFieldMap() { + return inferenceFieldMap; + } + public long totalSizeInBytes() { long totalSizeInBytes = 0; for (int i = 0; i < items.length; i++) { @@ -85,6 +113,10 @@ public String[] indices() { @Override public void writeTo(StreamOutput out) throws IOException { + if (inferenceFieldMap != null) { + // Inferencing metadata should have been consumed as part of the ShardBulkInferenceActionFilter processing + throw new IllegalStateException("Inference metadata should have been consumed before writing to the stream"); + } super.writeTo(out); out.writeArray((o, item) -> { if (item != null) { diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle index 3e2171d0654d..5429e46a1d3f 100644 --- a/x-pack/plugin/inference/build.gradle +++ b/x-pack/plugin/inference/build.gradle @@ -12,7 +12,7 @@ apply plugin: 'elasticsearch.internal-yaml-rest-test' restResources { restApi { - include '_common', 'indices', 'inference', 'index' + include '_common', 'bulk', 'indices', 'inference', 'index', 'get', 'update', 'reindex', 'search' } } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java 
b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java index 41b0ed3c0314..833b1fd3673f 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockDenseInferenceServiceIT.java @@ -27,9 +27,16 @@ public void testMockService() throws IOException { assertEquals("text_embedding_test_service", modelMap.get("service")); } - // The response is randomly generated, the input can be anything - var inference = inferOnMockService(inferenceEntityId, List.of(randomAlphaOfLength(10))); + List input = List.of(randomAlphaOfLength(10)); + var inference = inferOnMockService(inferenceEntityId, input); assertNonEmptyInferenceResults(inference, 1, TaskType.TEXT_EMBEDDING); + // Same input should return the same result + assertEquals(inference, inferOnMockService(inferenceEntityId, input)); + // Different input values should not + assertNotEquals( + inference, + inferOnMockService(inferenceEntityId, randomValueOtherThan(input, () -> List.of(randomAlphaOfLength(10)))) + ); } public void testMockServiceWithMultipleInputs() throws IOException { diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java index c580d72fcc40..97e0641f37c3 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockSparseInferenceServiceIT.java @@ -29,9 +29,16 @@ public void 
testMockService() throws IOException { assertEquals("test_service", modelMap.get("service")); } - // The response is randomly generated, the input can be anything - var inference = inferOnMockService(inferenceEntityId, List.of(randomAlphaOfLength(10))); + List input = List.of(randomAlphaOfLength(10)); + var inference = inferOnMockService(inferenceEntityId, input); assertNonEmptyInferenceResults(inference, 1, TaskType.SPARSE_EMBEDDING); + // Same input should return the same result + assertEquals(inference, inferOnMockService(inferenceEntityId, input)); + // Different input values should not + assertNotEquals( + inference, + inferOnMockService(inferenceEntityId, randomValueOtherThan(input, () -> List.of(randomAlphaOfLength(10)))) + ); } public void testMockServiceWithMultipleInputs() throws IOException { diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java index 99dfc9582eb0..1bde3704864d 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/AbstractTestInferenceService.java @@ -27,6 +27,14 @@ public abstract class AbstractTestInferenceService implements InferenceService { + protected static int stringWeight(String input, int position) { + int hashCode = input.hashCode(); + if (hashCode < 0) { + hashCode = -hashCode; + } + return hashCode + position; + } + @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersion.current(); // fine for these tests but will not work for cluster upgrade tests @@ -101,11 +109,6 @@ public TestServiceModel( super(new ModelConfigurations(modelId, taskType, service, serviceSettings, 
taskSettings), new ModelSecrets(secretSettings)); } - @Override - public TestDenseInferenceServiceExtension.TestServiceSettings getServiceSettings() { - return (TestDenseInferenceServiceExtension.TestServiceSettings) super.getServiceSettings(); - } - @Override public TestTaskSettings getTaskSettings() { return (TestTaskSettings) super.getTaskSettings(); diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java index c81dbdc45463..a54b14d8fad1 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java @@ -124,7 +124,7 @@ private TextEmbeddingResults makeResults(List input, int dimensions) { for (int i = 0; i < input.size(); i++) { List values = new ArrayList<>(); for (int j = 0; j < dimensions; j++) { - values.add((float) j); + values.add((float) stringWeight(input.get(i), j)); } embeddings.add(new TextEmbeddingResults.Embedding(values)); } @@ -135,8 +135,8 @@ private List makeChunkedResults(List inp var results = new ArrayList(); for (int i = 0; i < input.size(); i++) { double[] values = new double[dimensions]; - for (int j = 0; j < 5; j++) { - values[j] = j; + for (int j = 0; j < dimensions; j++) { + values[j] = stringWeight(input.get(i), j); } results.add( new org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults( @@ -172,7 +172,7 @@ public static TestServiceSettings fromMap(Map map) { SimilarityMeasure similarity = null; String similarityStr = (String) map.remove("similarity"); if (similarityStr != null) { - similarity = SimilarityMeasure.valueOf(similarityStr); + 
similarity = SimilarityMeasure.fromString(similarityStr); } return new TestServiceSettings(model, dimensions, similarity); diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java index b13e65d1ba80..42b8ccd11a64 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java @@ -121,7 +121,7 @@ private SparseEmbeddingResults makeResults(List input) { for (int i = 0; i < input.size(); i++) { var tokens = new ArrayList(); for (int j = 0; j < 5; j++) { - tokens.add(new SparseEmbeddingResults.WeightedToken(Integer.toString(j), (float) j)); + tokens.add(new SparseEmbeddingResults.WeightedToken("feature_" + j, stringWeight(input.get(i), j))); } embeddings.add(new SparseEmbeddingResults.Embedding(tokens, false)); } @@ -129,21 +129,22 @@ private SparseEmbeddingResults makeResults(List input) { } private List makeChunkedResults(List input) { - var chunks = new ArrayList(); + List results = new ArrayList<>(); for (int i = 0; i < input.size(); i++) { var tokens = new ArrayList(); for (int j = 0; j < 5; j++) { - tokens.add(new TextExpansionResults.WeightedToken(Integer.toString(j), (float) j)); + tokens.add(new TextExpansionResults.WeightedToken("feature_" + j, stringWeight(input.get(i), j))); } - chunks.add(new ChunkedTextExpansionResults.ChunkedResult(input.get(i), tokens)); + results.add( + new ChunkedSparseEmbeddingResults(List.of(new ChunkedTextExpansionResults.ChunkedResult(input.get(i), tokens))) + ); } - return List.of(new ChunkedSparseEmbeddingResults(chunks)); + return results; } protected 
ServiceSettings getServiceSettingsFromMap(Map serviceSettingsMap) { return TestServiceSettings.fromMap(serviceSettingsMap); } - } public record TestServiceSettings(String model, String hiddenField, boolean shouldReturnHiddenField) implements ServiceSettings { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 1afe3c891db8..34459c3beff9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -45,6 +46,7 @@ import org.elasticsearch.xpack.inference.action.TransportInferenceAction; import org.elasticsearch.xpack.inference.action.TransportInferenceUsageAction; import org.elasticsearch.xpack.inference.action.TransportPutInferenceModelAction; +import org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter; import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpSettings; @@ -76,6 +78,8 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import static java.util.Collections.singletonList; + public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin, MapperPlugin { /** @@ -101,6 +105,7 @@ public class InferencePlugin extends Plugin implements ActionPlugin, 
ExtensibleP private final SetOnce serviceComponents = new SetOnce<>(); private final SetOnce inferenceServiceRegistry = new SetOnce<>(); + private final SetOnce shardBulkInferenceActionFilter = new SetOnce<>(); private List inferenceServiceExtensions; public InferencePlugin(Settings settings) { @@ -166,6 +171,9 @@ public Collection createComponents(PluginServices services) { registry.init(services.client()); inferenceServiceRegistry.set(registry); + var actionFilter = new ShardBulkInferenceActionFilter(registry, modelRegistry); + shardBulkInferenceActionFilter.set(actionFilter); + return List.of(modelRegistry, registry); } @@ -272,4 +280,12 @@ public Map getMappers() { } return Map.of(); } + + @Override + public Collection getActionFilters() { + if (SemanticTextFeature.isEnabled()) { + return singletonList(shardBulkInferenceActionFilter.get()); + } + return List.of(); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java new file mode 100644 index 000000000000..38d8b8d9b35c --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilter.java @@ -0,0 +1,536 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.action.filter; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.bulk.BulkItemRequest; +import org.elasticsearch.action.bulk.BulkShardRequest; +import org.elasticsearch.action.bulk.TransportShardBulkAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.action.support.MappedActionFilter; +import org.elasticsearch.action.support.RefCountingRunnable; +import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.Model; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.inference.mapper.SemanticTextField; +import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; + +import java.util.ArrayList; +import 
java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.inference.mapper.SemanticTextField.toSemanticTextFieldChunks; + +/** + * A {@link MappedActionFilter} that intercepts {@link BulkShardRequest} to apply inference on fields specified + * as {@link SemanticTextFieldMapper} in the index mapping. For each semantic text field referencing fields in + * the request source, we generate embeddings and include the results in the source under the semantic text field + * name as a {@link SemanticTextField}. + * This transformation happens on the bulk coordinator node, and the {@link SemanticTextFieldMapper} parses the + * results during indexing on the shard. + * + * TODO: batchSize should be configurable via a cluster setting + */ +public class ShardBulkInferenceActionFilter implements MappedActionFilter { + protected static final int DEFAULT_BATCH_SIZE = 512; + + private final InferenceServiceRegistry inferenceServiceRegistry; + private final ModelRegistry modelRegistry; + private final int batchSize; + + public ShardBulkInferenceActionFilter(InferenceServiceRegistry inferenceServiceRegistry, ModelRegistry modelRegistry) { + this(inferenceServiceRegistry, modelRegistry, DEFAULT_BATCH_SIZE); + } + + public ShardBulkInferenceActionFilter(InferenceServiceRegistry inferenceServiceRegistry, ModelRegistry modelRegistry, int batchSize) { + this.inferenceServiceRegistry = inferenceServiceRegistry; + this.modelRegistry = modelRegistry; + this.batchSize = batchSize; + } + + @Override + public int order() { + // must execute last (after the security action filter) + return Integer.MAX_VALUE; + } + + @Override + public String actionName() { + return TransportShardBulkAction.ACTION_NAME; + } + + @Override + public void apply( + Task task, + String action, + Request request, 
+ ActionListener listener, + ActionFilterChain chain + ) { + if (TransportShardBulkAction.ACTION_NAME.equals(action)) { + BulkShardRequest bulkShardRequest = (BulkShardRequest) request; + var fieldInferenceMetadata = bulkShardRequest.consumeInferenceFieldMap(); + if (fieldInferenceMetadata != null && fieldInferenceMetadata.isEmpty() == false) { + Runnable onInferenceCompletion = () -> chain.proceed(task, action, request, listener); + processBulkShardRequest(fieldInferenceMetadata, bulkShardRequest, onInferenceCompletion); + return; + } + } + chain.proceed(task, action, request, listener); + } + + private void processBulkShardRequest( + Map fieldInferenceMap, + BulkShardRequest bulkShardRequest, + Runnable onCompletion + ) { + new AsyncBulkShardInferenceAction(fieldInferenceMap, bulkShardRequest, onCompletion).run(); + } + + private record InferenceProvider(InferenceService service, Model model) {} + + /** + * A field inference request on a single input. + * @param index The index of the request in the original bulk request. + * @param field The target field. + * @param input The input to run inference on. + * @param inputOrder The original order of the input. + * @param isOriginalFieldInput Whether the input is part of the original values of the field. + */ + private record FieldInferenceRequest(int index, String field, String input, int inputOrder, boolean isOriginalFieldInput) {} + + /** + * The field inference response. + * @param field The target field. + * @param input The input that was used to run inference. + * @param inputOrder The original order of the input. + * @param isOriginalFieldInput Whether the input is part of the original values of the field. + * @param model The model used to run inference. + * @param chunkedResults The actual results. 
+ */ + private record FieldInferenceResponse( + String field, + String input, + int inputOrder, + boolean isOriginalFieldInput, + Model model, + ChunkedInferenceServiceResults chunkedResults + ) {} + + private record FieldInferenceResponseAccumulator( + int id, + Map> responses, + List failures + ) { + void addOrUpdateResponse(FieldInferenceResponse response) { + synchronized (this) { + var list = responses.computeIfAbsent(response.field, k -> new ArrayList<>()); + list.add(response); + } + } + + void addFailure(Exception exc) { + synchronized (this) { + failures.add(exc); + } + } + } + + private class AsyncBulkShardInferenceAction implements Runnable { + private final Map fieldInferenceMap; + private final BulkShardRequest bulkShardRequest; + private final Runnable onCompletion; + private final AtomicArray inferenceResults; + + private AsyncBulkShardInferenceAction( + Map fieldInferenceMap, + BulkShardRequest bulkShardRequest, + Runnable onCompletion + ) { + this.fieldInferenceMap = fieldInferenceMap; + this.bulkShardRequest = bulkShardRequest; + this.inferenceResults = new AtomicArray<>(bulkShardRequest.items().length); + this.onCompletion = onCompletion; + } + + @Override + public void run() { + Map> inferenceRequests = createFieldInferenceRequests(bulkShardRequest); + Runnable onInferenceCompletion = () -> { + try { + for (var inferenceResponse : inferenceResults.asList()) { + var request = bulkShardRequest.items()[inferenceResponse.id]; + try { + applyInferenceResponses(request, inferenceResponse); + } catch (Exception exc) { + request.abort(bulkShardRequest.index(), exc); + } + } + } finally { + onCompletion.run(); + } + }; + try (var releaseOnFinish = new RefCountingRunnable(onInferenceCompletion)) { + for (var entry : inferenceRequests.entrySet()) { + executeShardBulkInferenceAsync(entry.getKey(), null, entry.getValue(), releaseOnFinish.acquire()); + } + } + } + + private void executeShardBulkInferenceAsync( + final String inferenceId, + @Nullable 
InferenceProvider inferenceProvider, + final List requests, + final Releasable onFinish + ) { + if (inferenceProvider == null) { + ActionListener modelLoadingListener = new ActionListener<>() { + @Override + public void onResponse(ModelRegistry.UnparsedModel unparsedModel) { + var service = inferenceServiceRegistry.getService(unparsedModel.service()); + if (service.isEmpty() == false) { + var provider = new InferenceProvider( + service.get(), + service.get() + .parsePersistedConfigWithSecrets( + inferenceId, + unparsedModel.taskType(), + unparsedModel.settings(), + unparsedModel.secrets() + ) + ); + executeShardBulkInferenceAsync(inferenceId, provider, requests, onFinish); + } else { + try (onFinish) { + for (FieldInferenceRequest request : requests) { + inferenceResults.get(request.index).failures.add( + new ResourceNotFoundException( + "Inference service [{}] not found for field [{}]", + unparsedModel.service(), + request.field + ) + ); + } + } + } + } + + @Override + public void onFailure(Exception exc) { + try (onFinish) { + for (FieldInferenceRequest request : requests) { + Exception failure; + if (ExceptionsHelper.unwrap(exc, ResourceNotFoundException.class) instanceof ResourceNotFoundException) { + failure = new ResourceNotFoundException( + "Inference id [{}] not found for field [{}]", + inferenceId, + request.field + ); + } else { + failure = new ElasticsearchException( + "Error loading inference for inference id [{}] on field [{}]", + exc, + inferenceId, + request.field + ); + } + inferenceResults.get(request.index).failures.add(failure); + } + } + } + }; + modelRegistry.getModelWithSecrets(inferenceId, modelLoadingListener); + return; + } + int currentBatchSize = Math.min(requests.size(), batchSize); + final List currentBatch = requests.subList(0, currentBatchSize); + final List nextBatch = requests.subList(currentBatchSize, requests.size()); + final List inputs = currentBatch.stream().map(FieldInferenceRequest::input).collect(Collectors.toList()); + 
ActionListener> completionListener = new ActionListener<>() { + @Override + public void onResponse(List results) { + try { + var requestsIterator = requests.iterator(); + for (ChunkedInferenceServiceResults result : results) { + var request = requestsIterator.next(); + var acc = inferenceResults.get(request.index); + if (result instanceof ErrorChunkedInferenceResults error) { + acc.addFailure( + new ElasticsearchException( + "Exception when running inference id [{}] on field [{}]", + error.getException(), + inferenceProvider.model.getInferenceEntityId(), + request.field + ) + ); + } else { + acc.addOrUpdateResponse( + new FieldInferenceResponse( + request.field(), + request.input(), + request.inputOrder(), + request.isOriginalFieldInput(), + inferenceProvider.model, + result + ) + ); + } + } + } finally { + onFinish(); + } + } + + @Override + public void onFailure(Exception exc) { + try { + for (FieldInferenceRequest request : requests) { + addInferenceResponseFailure( + request.index, + new ElasticsearchException( + "Exception when running inference id [{}] on field [{}]", + exc, + inferenceProvider.model.getInferenceEntityId(), + request.field + ) + ); + } + } finally { + onFinish(); + } + } + + private void onFinish() { + if (nextBatch.isEmpty()) { + onFinish.close(); + } else { + executeShardBulkInferenceAsync(inferenceId, inferenceProvider, nextBatch, onFinish); + } + } + }; + inferenceProvider.service() + .chunkedInfer( + inferenceProvider.model(), + null, + inputs, + Map.of(), + InputType.INGEST, + new ChunkingOptions(null, null), + TimeValue.MAX_VALUE, + completionListener + ); + } + + private FieldInferenceResponseAccumulator ensureResponseAccumulatorSlot(int id) { + FieldInferenceResponseAccumulator acc = inferenceResults.get(id); + if (acc == null) { + acc = new FieldInferenceResponseAccumulator(id, new HashMap<>(), new ArrayList<>()); + inferenceResults.set(id, acc); + } + return acc; + } + + private void addInferenceResponseFailure(int id, Exception 
failure) { + var acc = ensureResponseAccumulatorSlot(id); + acc.addFailure(failure); + } + + /** + * Applies the {@link FieldInferenceResponseAccumulator} to the provided {@link BulkItemRequest}. + * If the response contains failures, the bulk item request is marked as failed for the downstream action. + * Otherwise, the source of the request is augmented with the field inference results under the + * {@link SemanticTextField#INFERENCE_FIELD} field. + */ + private void applyInferenceResponses(BulkItemRequest item, FieldInferenceResponseAccumulator response) { + if (response.failures().isEmpty() == false) { + for (var failure : response.failures()) { + item.abort(item.index(), failure); + } + return; + } + + final IndexRequest indexRequest = getIndexRequestOrNull(item.request()); + var newDocMap = indexRequest.sourceAsMap(); + for (var entry : response.responses.entrySet()) { + var fieldName = entry.getKey(); + var responses = entry.getValue(); + var model = responses.get(0).model(); + // ensure that the order in the original field is consistent in case of multiple inputs + Collections.sort(responses, Comparator.comparingInt(FieldInferenceResponse::inputOrder)); + List inputs = responses.stream().filter(r -> r.isOriginalFieldInput).map(r -> r.input).collect(Collectors.toList()); + List results = responses.stream().map(r -> r.chunkedResults).collect(Collectors.toList()); + var result = new SemanticTextField( + fieldName, + inputs, + new SemanticTextField.InferenceResult( + model.getInferenceEntityId(), + new SemanticTextField.ModelSettings(model), + toSemanticTextFieldChunks(fieldName, model.getInferenceEntityId(), results, indexRequest.getContentType()) + ), + indexRequest.getContentType() + ); + newDocMap.put(fieldName, result); + } + indexRequest.source(newDocMap, indexRequest.getContentType()); + } + + /** + * Register a {@link FieldInferenceRequest} for every non-empty field referencing an inference ID in the index. 
+ * If results are already populated for fields in the original index request, the inference request for this specific + * field is skipped, and the existing results remain unchanged. + * Validation of inference ID and model settings occurs in the {@link SemanticTextFieldMapper} during field indexing, + * where an error will be thrown if they mismatch or if the content is malformed. + *

+ * TODO: We should validate the settings for pre-existing results here and apply the inference only if they differ? + */ + private Map> createFieldInferenceRequests(BulkShardRequest bulkShardRequest) { + Map> fieldRequestsMap = new LinkedHashMap<>(); + int itemIndex = 0; + for (var item : bulkShardRequest.items()) { + if (item.getPrimaryResponse() != null) { + // item was already aborted/processed by a filter in the chain upstream (e.g. security) + continue; + } + boolean isUpdateRequest = false; + final IndexRequest indexRequest; + if (item.request() instanceof IndexRequest ir) { + indexRequest = ir; + } else if (item.request() instanceof UpdateRequest updateRequest) { + isUpdateRequest = true; + if (updateRequest.script() != null) { + addInferenceResponseFailure( + item.id(), + new ElasticsearchStatusException( + "Cannot apply update with a script on indices that contain [{}] field(s)", + RestStatus.BAD_REQUEST, + SemanticTextFieldMapper.CONTENT_TYPE + ) + ); + continue; + } + indexRequest = updateRequest.doc(); + } else { + // ignore delete request + continue; + } + final Map docMap = indexRequest.sourceAsMap(); + for (var entry : fieldInferenceMap.values()) { + String field = entry.getName(); + String inferenceId = entry.getInferenceId(); + var originalFieldValue = XContentMapValues.extractValue(field, docMap); + if (originalFieldValue instanceof Map) { + continue; + } + int order = 0; + for (var sourceField : entry.getSourceFields()) { + boolean isOriginalFieldInput = sourceField.equals(field); + var valueObj = XContentMapValues.extractValue(sourceField, docMap); + if (valueObj == null) { + if (isUpdateRequest) { + addInferenceResponseFailure( + item.id(), + new ElasticsearchStatusException( + "Field [{}] must be specified on an update request to calculate inference for field [{}]", + RestStatus.BAD_REQUEST, + sourceField, + field + ) + ); + break; + } + continue; + } + ensureResponseAccumulatorSlot(itemIndex); + final List values; + try { + values = 
nodeStringValues(field, valueObj); + } catch (Exception exc) { + addInferenceResponseFailure(item.id(), exc); + break; + } + List fieldRequests = fieldRequestsMap.computeIfAbsent(inferenceId, k -> new ArrayList<>()); + for (var v : values) { + fieldRequests.add(new FieldInferenceRequest(itemIndex, field, v, order++, isOriginalFieldInput)); + } + } + } + itemIndex++; + } + return fieldRequestsMap; + } + } + + /** + * This method converts the given {@code valueObj} into a list of strings. + * If {@code valueObj} is not a string or a collection of strings, it throws an ElasticsearchStatusException. + */ + private static List nodeStringValues(String field, Object valueObj) { + if (valueObj instanceof String value) { + return List.of(value); + } else if (valueObj instanceof Collection values) { + List valuesString = new ArrayList<>(); + for (var v : values) { + if (v instanceof String value) { + valuesString.add(value); + } else { + throw new ElasticsearchStatusException( + "Invalid format for field [{}], expected [String] got [{}]", + RestStatus.BAD_REQUEST, + field, + valueObj.getClass().getSimpleName() + ); + } + } + return valuesString; + } + throw new ElasticsearchStatusException( + "Invalid format for field [{}], expected [String] got [{}]", + RestStatus.BAD_REQUEST, + field, + valueObj.getClass().getSimpleName() + ); + } + + static IndexRequest getIndexRequestOrNull(DocWriteRequest docWriteRequest) { + if (docWriteRequest instanceof IndexRequest indexRequest) { + return indexRequest; + } else if (docWriteRequest instanceof UpdateRequest updateRequest) { + return updateRequest.doc(); + } else { + return null; + } + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java new file mode 100644 index 000000000000..c87faa2b52cc --- /dev/null +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterTests.java @@ -0,0 +1,386 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action.filter; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BulkItemRequest; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkShardRequest; +import org.elasticsearch.action.bulk.TransportShardBulkAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.ActionFilterChain; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.cluster.metadata.InferenceFieldMetadata; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.inference.results.ChunkedSparseEmbeddingResults; +import 
org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.inference.model.TestModel; +import org.elasticsearch.xpack.inference.registry.ModelRegistry; +import org.junit.After; +import org.junit.Before; +import org.mockito.stubbing.Answer; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.awaitLatch; +import static org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter.DEFAULT_BATCH_SIZE; +import static org.elasticsearch.xpack.inference.action.filter.ShardBulkInferenceActionFilter.getIndexRequestOrNull; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSemanticText; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.randomSparseEmbeddings; +import static org.elasticsearch.xpack.inference.mapper.SemanticTextFieldTests.toChunkedResult; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ShardBulkInferenceActionFilterTests extends ESTestCase { + private ThreadPool threadPool; + + @Before + public void setupThreadPool() { + threadPool = new TestThreadPool(getTestName()); + } + + @After + public void tearDownThreadPool() throws Exception { + terminate(threadPool); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public void testFilterNoop() throws Exception { + 
ShardBulkInferenceActionFilter filter = createFilter(threadPool, Map.of(), DEFAULT_BATCH_SIZE); + CountDownLatch chainExecuted = new CountDownLatch(1); + ActionFilterChain actionFilterChain = (task, action, request, listener) -> { + try { + assertNull(((BulkShardRequest) request).getInferenceFieldMap()); + } finally { + chainExecuted.countDown(); + } + }; + ActionListener actionListener = mock(ActionListener.class); + Task task = mock(Task.class); + BulkShardRequest request = new BulkShardRequest( + new ShardId("test", "test", 0), + WriteRequest.RefreshPolicy.NONE, + new BulkItemRequest[0] + ); + request.setInferenceFieldMap( + Map.of("foo", new InferenceFieldMetadata("foo", "bar", generateRandomStringArray(5, 10, false, false))) + ); + filter.apply(task, TransportShardBulkAction.ACTION_NAME, request, actionListener, actionFilterChain); + awaitLatch(chainExecuted, 10, TimeUnit.SECONDS); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public void testInferenceNotFound() throws Exception { + StaticModel model = StaticModel.createRandomInstance(); + ShardBulkInferenceActionFilter filter = createFilter( + threadPool, + Map.of(model.getInferenceEntityId(), model), + randomIntBetween(1, 10) + ); + CountDownLatch chainExecuted = new CountDownLatch(1); + ActionFilterChain actionFilterChain = (task, action, request, listener) -> { + try { + BulkShardRequest bulkShardRequest = (BulkShardRequest) request; + assertNull(bulkShardRequest.getInferenceFieldMap()); + for (BulkItemRequest item : bulkShardRequest.items()) { + assertNotNull(item.getPrimaryResponse()); + assertTrue(item.getPrimaryResponse().isFailed()); + BulkItemResponse.Failure failure = item.getPrimaryResponse().getFailure(); + assertThat(failure.getStatus(), equalTo(RestStatus.NOT_FOUND)); + } + } finally { + chainExecuted.countDown(); + } + }; + ActionListener actionListener = mock(ActionListener.class); + Task task = mock(Task.class); + + Map inferenceFieldMap = Map.of( + "field1", + new 
InferenceFieldMetadata("field1", model.getInferenceEntityId(), new String[] { "field1" }), + "field2", + new InferenceFieldMetadata("field2", "inference_0", new String[] { "field2" }), + "field3", + new InferenceFieldMetadata("field3", "inference_0", new String[] { "field3" }) + ); + BulkItemRequest[] items = new BulkItemRequest[10]; + for (int i = 0; i < items.length; i++) { + items[i] = randomBulkItemRequest(Map.of(), inferenceFieldMap)[0]; + } + BulkShardRequest request = new BulkShardRequest(new ShardId("test", "test", 0), WriteRequest.RefreshPolicy.NONE, items); + request.setInferenceFieldMap(inferenceFieldMap); + filter.apply(task, TransportShardBulkAction.ACTION_NAME, request, actionListener, actionFilterChain); + awaitLatch(chainExecuted, 10, TimeUnit.SECONDS); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public void testItemFailures() throws Exception { + StaticModel model = StaticModel.createRandomInstance(); + ShardBulkInferenceActionFilter filter = createFilter( + threadPool, + Map.of(model.getInferenceEntityId(), model), + randomIntBetween(1, 10) + ); + model.putResult("I am a failure", new ErrorChunkedInferenceResults(new IllegalArgumentException("boom"))); + model.putResult("I am a success", randomSparseEmbeddings(List.of("I am a success"))); + CountDownLatch chainExecuted = new CountDownLatch(1); + ActionFilterChain actionFilterChain = (task, action, request, listener) -> { + try { + BulkShardRequest bulkShardRequest = (BulkShardRequest) request; + assertNull(bulkShardRequest.getInferenceFieldMap()); + assertThat(bulkShardRequest.items().length, equalTo(3)); + + // item 0 is a failure + assertNotNull(bulkShardRequest.items()[0].getPrimaryResponse()); + assertTrue(bulkShardRequest.items()[0].getPrimaryResponse().isFailed()); + BulkItemResponse.Failure failure = bulkShardRequest.items()[0].getPrimaryResponse().getFailure(); + assertThat(failure.getCause().getCause().getMessage(), containsString("boom")); + + // item 1 is a success + 
assertNull(bulkShardRequest.items()[1].getPrimaryResponse()); + IndexRequest actualRequest = getIndexRequestOrNull(bulkShardRequest.items()[1].request()); + assertThat(XContentMapValues.extractValue("field1.text", actualRequest.sourceAsMap()), equalTo("I am a success")); + + // item 2 is a failure + assertNotNull(bulkShardRequest.items()[2].getPrimaryResponse()); + assertTrue(bulkShardRequest.items()[2].getPrimaryResponse().isFailed()); + failure = bulkShardRequest.items()[2].getPrimaryResponse().getFailure(); + assertThat(failure.getCause().getCause().getMessage(), containsString("boom")); + } finally { + chainExecuted.countDown(); + } + }; + ActionListener actionListener = mock(ActionListener.class); + Task task = mock(Task.class); + + Map inferenceFieldMap = Map.of( + "field1", + new InferenceFieldMetadata("field1", model.getInferenceEntityId(), new String[] { "field1" }) + ); + BulkItemRequest[] items = new BulkItemRequest[3]; + items[0] = new BulkItemRequest(0, new IndexRequest("index").source("field1", "I am a failure")); + items[1] = new BulkItemRequest(1, new IndexRequest("index").source("field1", "I am a success")); + items[2] = new BulkItemRequest(2, new IndexRequest("index").source("field1", "I am a failure")); + BulkShardRequest request = new BulkShardRequest(new ShardId("test", "test", 0), WriteRequest.RefreshPolicy.NONE, items); + request.setInferenceFieldMap(inferenceFieldMap); + filter.apply(task, TransportShardBulkAction.ACTION_NAME, request, actionListener, actionFilterChain); + awaitLatch(chainExecuted, 10, TimeUnit.SECONDS); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public void testManyRandomDocs() throws Exception { + Map inferenceModelMap = new HashMap<>(); + int numModels = randomIntBetween(1, 5); + for (int i = 0; i < numModels; i++) { + StaticModel model = StaticModel.createRandomInstance(); + inferenceModelMap.put(model.getInferenceEntityId(), model); + } + + int numInferenceFields = randomIntBetween(1, 5); + Map 
inferenceFieldMap = new HashMap<>(); + for (int i = 0; i < numInferenceFields; i++) { + String field = randomAlphaOfLengthBetween(5, 10); + String inferenceId = randomFrom(inferenceModelMap.keySet()); + inferenceFieldMap.put(field, new InferenceFieldMetadata(field, inferenceId, new String[] { field })); + } + + int numRequests = randomIntBetween(100, 1000); + BulkItemRequest[] originalRequests = new BulkItemRequest[numRequests]; + BulkItemRequest[] modifiedRequests = new BulkItemRequest[numRequests]; + for (int id = 0; id < numRequests; id++) { + BulkItemRequest[] res = randomBulkItemRequest(inferenceModelMap, inferenceFieldMap); + originalRequests[id] = res[0]; + modifiedRequests[id] = res[1]; + } + + ShardBulkInferenceActionFilter filter = createFilter(threadPool, inferenceModelMap, randomIntBetween(10, 30)); + CountDownLatch chainExecuted = new CountDownLatch(1); + ActionFilterChain actionFilterChain = (task, action, request, listener) -> { + try { + assertThat(request, instanceOf(BulkShardRequest.class)); + BulkShardRequest bulkShardRequest = (BulkShardRequest) request; + assertNull(bulkShardRequest.getInferenceFieldMap()); + BulkItemRequest[] items = bulkShardRequest.items(); + assertThat(items.length, equalTo(originalRequests.length)); + for (int id = 0; id < items.length; id++) { + IndexRequest actualRequest = getIndexRequestOrNull(items[id].request()); + IndexRequest expectedRequest = getIndexRequestOrNull(modifiedRequests[id].request()); + try { + assertToXContentEquivalent(expectedRequest.source(), actualRequest.source(), expectedRequest.getContentType()); + } catch (Exception exc) { + throw new IllegalStateException(exc); + } + } + } finally { + chainExecuted.countDown(); + } + }; + ActionListener actionListener = mock(ActionListener.class); + Task task = mock(Task.class); + BulkShardRequest original = new BulkShardRequest(new ShardId("test", "test", 0), WriteRequest.RefreshPolicy.NONE, originalRequests); + 
original.setInferenceFieldMap(inferenceFieldMap); + filter.apply(task, TransportShardBulkAction.ACTION_NAME, original, actionListener, actionFilterChain); + awaitLatch(chainExecuted, 10, TimeUnit.SECONDS); + } + + @SuppressWarnings("unchecked") + private static ShardBulkInferenceActionFilter createFilter(ThreadPool threadPool, Map modelMap, int batchSize) { + ModelRegistry modelRegistry = mock(ModelRegistry.class); + Answer unparsedModelAnswer = invocationOnMock -> { + String id = (String) invocationOnMock.getArguments()[0]; + ActionListener listener = (ActionListener) invocationOnMock + .getArguments()[1]; + var model = modelMap.get(id); + if (model != null) { + listener.onResponse( + new ModelRegistry.UnparsedModel( + model.getInferenceEntityId(), + model.getTaskType(), + model.getServiceSettings().model(), + XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.toString(model.getTaskSettings()), false), + XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.toString(model.getSecretSettings()), false) + ) + ); + } else { + listener.onFailure(new ResourceNotFoundException("model id [{}] not found", id)); + } + return null; + }; + doAnswer(unparsedModelAnswer).when(modelRegistry).getModelWithSecrets(any(), any()); + + InferenceService inferenceService = mock(InferenceService.class); + Answer chunkedInferAnswer = invocationOnMock -> { + StaticModel model = (StaticModel) invocationOnMock.getArguments()[0]; + List inputs = (List) invocationOnMock.getArguments()[2]; + ActionListener> listener = (ActionListener< + List>) invocationOnMock.getArguments()[7]; + Runnable runnable = () -> { + List results = new ArrayList<>(); + for (String input : inputs) { + results.add(model.getResults(input)); + } + listener.onResponse(results); + }; + if (randomBoolean()) { + try { + threadPool.generic().execute(runnable); + } catch (Exception exc) { + listener.onFailure(exc); + } + } else { + runnable.run(); + } + return null; + }; + 
doAnswer(chunkedInferAnswer).when(inferenceService).chunkedInfer(any(), any(), any(), any(), any(), any(), any(), any()); + + Answer modelAnswer = invocationOnMock -> { + String inferenceId = (String) invocationOnMock.getArguments()[0]; + return modelMap.get(inferenceId); + }; + doAnswer(modelAnswer).when(inferenceService).parsePersistedConfigWithSecrets(any(), any(), any(), any()); + + InferenceServiceRegistry inferenceServiceRegistry = mock(InferenceServiceRegistry.class); + when(inferenceServiceRegistry.getService(any())).thenReturn(Optional.of(inferenceService)); + ShardBulkInferenceActionFilter filter = new ShardBulkInferenceActionFilter(inferenceServiceRegistry, modelRegistry, batchSize); + return filter; + } + + private static BulkItemRequest[] randomBulkItemRequest( + Map modelMap, + Map fieldInferenceMap + ) { + Map docMap = new LinkedHashMap<>(); + Map expectedDocMap = new LinkedHashMap<>(); + XContentType requestContentType = randomFrom(XContentType.values()); + for (var entry : fieldInferenceMap.values()) { + String field = entry.getName(); + var model = modelMap.get(entry.getInferenceId()); + String text = randomAlphaOfLengthBetween(10, 100); + docMap.put(field, text); + expectedDocMap.put(field, text); + if (model == null) { + // ignore results, the doc should fail with a resource not found exception + continue; + } + var result = randomSemanticText(field, model, List.of(text), requestContentType); + model.putResult(text, toChunkedResult(result)); + expectedDocMap.put(field, result); + } + + int requestId = randomIntBetween(0, Integer.MAX_VALUE); + return new BulkItemRequest[] { + new BulkItemRequest(requestId, new IndexRequest("index").source(docMap, requestContentType)), + new BulkItemRequest(requestId, new IndexRequest("index").source(expectedDocMap, requestContentType)) }; + } + + private static class StaticModel extends TestModel { + private final Map resultMap; + + StaticModel( + String inferenceEntityId, + TaskType taskType, + String service, + 
TestServiceSettings serviceSettings, + TestTaskSettings taskSettings, + TestSecretSettings secretSettings + ) { + super(inferenceEntityId, taskType, service, serviceSettings, taskSettings, secretSettings); + this.resultMap = new HashMap<>(); + } + + public static StaticModel createRandomInstance() { + TestModel testModel = TestModel.createRandomInstance(); + return new StaticModel( + testModel.getInferenceEntityId(), + testModel.getTaskType(), + randomAlphaOfLength(10), + testModel.getServiceSettings(), + testModel.getTaskSettings(), + testModel.getSecretSettings() + ); + } + + ChunkedInferenceServiceResults getResults(String text) { + return resultMap.getOrDefault(text, new ChunkedSparseEmbeddingResults(List.of())); + } + + void putResult(String text, ChunkedInferenceServiceResults result) { + resultMap.put(text, result); + } + } +} diff --git a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java index a594c577dcdd..a397d9864d23 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java +++ b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java @@ -21,9 +21,8 @@ public class InferenceRestIT extends ESClientYamlSuiteTestCase { public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .setting("xpack.security.enabled", "false") .setting("xpack.security.http.ssl.enabled", "false") - .plugin("x-pack-inference") .plugin("inference-service-test") - .distribution(DistributionType.INTEG_TEST) + .distribution(DistributionType.DEFAULT) .build(); public InferenceRestIT(final ClientYamlTestCandidate testCandidate) { diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml 
b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml new file mode 100644 index 000000000000..067b79aec1fd --- /dev/null +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/30_semantic_text_inference.yml @@ -0,0 +1,773 @@ +setup: + - skip: + version: " - 8.14.99" + reason: semantic_text introduced in 8.15.0 + + - do: + inference.put: + task_type: sparse_embedding + inference_id: sparse-inference-id + body: > + { + "service": "test_service", + "service_settings": { + "model": "my_model", + "api_key": "abc64" + }, + "task_settings": { + } + } + - do: + inference.put: + task_type: text_embedding + inference_id: dense-inference-id + body: > + { + "service": "text_embedding_test_service", + "service_settings": { + "model": "my_model", + "dimensions": 10, + "similarity": "cosine", + "api_key": "abc64" + }, + "task_settings": { + } + } + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + dense_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + +--- +"Calculates text expansion and embedding results for new documents": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + +--- +"Inference 
fields do not create new mappings": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + indices.get_mapping: + index: test-index + + - match: {test-index.mappings.properties.sparse_field.type: semantic_text} + - match: {test-index.mappings.properties.dense_field.type: semantic_text} + - match: {test-index.mappings.properties.non_inference_field.type: text} + - length: {test-index.mappings.properties: 3} + +--- +"Sparse vector results are indexed as nested chunks and searchable": + - do: + bulk: + index: test-index + refresh: true + body: | + {"index":{}} + {"sparse_field": ["you know, for testing", "now with chunks"]} + {"index":{}} + {"sparse_field": ["some more tests", "that include chunks"]} + + - do: + search: + index: test-index + body: + query: + nested: + path: sparse_field.inference.chunks + query: + text_expansion: + sparse_field.inference.chunks.embeddings: + model_id: sparse-inference-id + model_text: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - length: { hits.hits.0._source.sparse_field.inference.chunks: 2 } + - length: { hits.hits.1._source.sparse_field.inference.chunks: 2 } + + # Search with inner hits + - do: + search: + _source: false + index: test-index + body: + query: + nested: + path: sparse_field.inference.chunks + inner_hits: + _source: false + fields: [sparse_field.inference.chunks.text] + query: + text_expansion: + sparse_field.inference.chunks.embeddings: + model_id: sparse-inference-id + model_text: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.total.value: 2 } + - match: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.total.relation: eq } + + - length: { 
hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.hits.0.fields.sparse_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.0.inner_hits.sparse_field\.inference\.chunks.hits.hits.1.fields.sparse_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.1.inner_hits.sparse_field\.inference\.chunks.hits.hits.0.fields.sparse_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.1.inner_hits.sparse_field\.inference\.chunks.hits.hits.1.fields.sparse_field\.inference\.chunks.0.text: 1 } + + +--- +"Dense vector results are indexed as nested chunks and searchable": + - do: + bulk: + index: test-index + refresh: true + body: | + {"index":{}} + {"dense_field": ["you know, for testing", "now with chunks"]} + {"index":{}} + {"dense_field": ["some more tests", "that include chunks"]} + + - do: + search: + index: test-index + body: + query: + nested: + path: dense_field.inference.chunks + query: + knn: + field: dense_field.inference.chunks.embeddings + query_vector_builder: + text_embedding: + model_id: dense-inference-id + model_text: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - length: { hits.hits.0._source.dense_field.inference.chunks: 2 } + - length: { hits.hits.1._source.dense_field.inference.chunks: 2 } + + # Search with inner hits + - do: + search: + _source: false + index: test-index + body: + query: + nested: + path: dense_field.inference.chunks + inner_hits: + _source: false + fields: [dense_field.inference.chunks.text] + query: + knn: + field: dense_field.inference.chunks.embeddings + query_vector_builder: + text_embedding: + model_id: dense-inference-id + model_text: "you know, for testing" + + - match: { hits.total.value: 2 } + - match: { hits.total.relation: eq } + - match: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.total.value: 2 } + - match: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.total.relation: eq } + + - length: { 
hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.hits.0.fields.dense_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.0.inner_hits.dense_field\.inference\.chunks.hits.hits.1.fields.dense_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.1.inner_hits.dense_field\.inference\.chunks.hits.hits.0.fields.dense_field\.inference\.chunks.0.text: 1 } + - length: { hits.hits.1.inner_hits.dense_field\.inference\.chunks.hits.hits.1.fields.dense_field\.inference\.chunks.0.text: 1 } + + + +--- +"Updating non semantic_text fields does not recalculate embeddings": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - set: { _source.sparse_field.inference.chunks.0.embeddings: sparse_field_embedding } + - set: { _source.dense_field.inference.chunks.0.embeddings: dense_field_embedding } + + - do: + update: + index: test-index + id: doc_1 + body: + doc: + non_inference_field: "another non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.embeddings: $sparse_field_embedding } + - match: { _source.dense_field.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.0.embeddings: $dense_field_embedding } + - match: { _source.non_inference_field: "another non inference test" } + +--- +"Updating semantic_text fields recalculates embeddings": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - match: { 
_source.sparse_field.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + + - do: + bulk: + index: test-index + body: + - '{"update": {"_id": "doc_1"}}' + - '{"doc":{"sparse_field": "I am a test", "dense_field": "I am a teapot"}}' + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "I am a test" } + - match: { _source.sparse_field.inference.chunks.0.text: "I am a test" } + - match: { _source.dense_field.text: "I am a teapot" } + - match: { _source.dense_field.inference.chunks.0.text: "I am a teapot" } + - match: { _source.non_inference_field: "non inference test" } + + - do: + update: + index: test-index + id: doc_1 + body: + doc: + sparse_field: "updated inference test" + dense_field: "another updated inference test" + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "updated inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } + - match: { _source.dense_field.text: "another updated inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - match: { _source.non_inference_field: "non inference test" } + + - do: + bulk: + index: test-index + body: + - '{"update": {"_id": "doc_1"}}' + - '{"doc":{"sparse_field": "bulk inference test", "dense_field": "bulk updated inference test"}}' + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "bulk inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "bulk inference test" } + - match: { _source.dense_field.text: "bulk updated inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "bulk updated 
inference test" } + - match: { _source.non_inference_field: "non inference test" } + +--- +"Reindex works for semantic_text fields": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + get: + index: test-index + id: doc_1 + + - set: { _source.sparse_field.inference.chunks.0.embeddings: sparse_field_embedding } + - set: { _source.dense_field.inference.chunks.0.embeddings: dense_field_embedding } + + - do: + indices.refresh: { } + + - do: + indices.create: + index: destination-index + body: + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + dense_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + + - do: + reindex: + wait_for_completion: true + body: + source: + index: test-index + dest: + index: destination-index + - do: + get: + index: destination-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.sparse_field.inference.chunks.0.embeddings: $sparse_field_embedding } + - match: { _source.dense_field.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.dense_field.inference.chunks.0.embeddings: $dense_field_embedding } + - match: { _source.non_inference_field: "non inference test" } + +--- +"Fails for non-existent inference": + - do: + indices.create: + index: incorrect-test-index + body: + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: non-existing-inference-id + non_inference_field: + type: text + + - do: + catch: missing + index: + index: incorrect-test-index + id: doc_1 + body: + sparse_field: "inference test" + non_inference_field: "non inference test" + + - match: { error.reason: "Inference 
id [non-existing-inference-id] not found for field [sparse_field]" } + + # Succeeds when semantic_text field is not used + - do: + index: + index: incorrect-test-index + id: doc_1 + body: + non_inference_field: "non inference test" + +--- +"Updates with script are not allowed": + - do: + bulk: + index: test-index + body: + - '{"index": {"_id": "doc_1"}}' + - '{"doc":{"sparse_field": "I am a test", "dense_field": "I am a teapot"}}' + + - do: + bulk: + index: test-index + body: + - '{"update": {"_id": "doc_1"}}' + - '{"script": "ctx._source.new_field = \"hello\"", "scripted_upsert": true}' + + - match: { errors: true } + - match: { items.0.update.status: 400 } + - match: { items.0.update.error.reason: "Cannot apply update with a script on indices that contain [semantic_text] field(s)" } + +--- +"semantic_text copy_to calculate inference for source fields": + - do: + indices.create: + index: test-copy-to-index + body: + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + source_field: + type: text + copy_to: sparse_field + another_source_field: + type: text + copy_to: sparse_field + + - do: + index: + index: test-copy-to-index + id: doc_1 + body: + source_field: "copy_to inference test" + sparse_field: "inference test" + another_source_field: "another copy_to inference test" + + - do: + get: + index: test-copy-to-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - length: { _source.sparse_field.inference.chunks: 3 } + - match: { _source.sparse_field.inference.chunks.0.text: "another copy_to inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.1.text: "copy_to inference test" } + - exists: _source.sparse_field.inference.chunks.1.embeddings + - match: { _source.sparse_field.inference.chunks.2.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.2.embeddings + + +--- +"semantic_text copy_to 
needs values for every source field for updates": + - do: + indices.create: + index: test-copy-to-index + body: + mappings: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + source_field: + type: text + copy_to: sparse_field + another_source_field: + type: text + copy_to: sparse_field + + # Not every source field needed on creation + - do: + index: + index: test-copy-to-index + id: doc_1 + body: + source_field: "a single source field provided" + sparse_field: "inference test" + + # Every source field needed on bulk updates + - do: + bulk: + body: + - '{"update": {"_index": "test-copy-to-index", "_id": "doc_1"}}' + - '{"doc": {"source_field": "a single source field is kept as provided via bulk", "sparse_field": "updated inference test" }}' + + - match: { items.0.update.status: 400 } + - match: { items.0.update.error.reason: "Field [another_source_field] must be specified on an update request to calculate inference for field [sparse_field]" } + + +--- +"Update works for now - but will be unsupported later to avoid dealing with missing semantic_text fields content or copy_to fields": + - do: + index: + index: test-index + id: doc_1 + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + update: + index: test-index + id: doc_1 + body: + doc: { "sparse_field": "updated inference test", "dense_field": "another updated inference test", "non_inference_field": "updated non inference test" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "updated inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } + - match: { _source.dense_field.text: "another updated inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another 
updated inference test" } + - match: { _source.non_inference_field: "updated non inference test" } + +--- +"Calculates text expansion results for bulk updates - index": + - do: + bulk: + body: + - '{"index": {"_index": "test-index", "_id": "doc_1"}}' + - '{"sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test"}' + + - match: { errors: false } + - match: { items.0.index.result: "created" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + +--- +"Calculates text expansion results for bulk updates - update": + - do: + bulk: + body: + - '{"index": {"_index": "test-index", "_id": "doc_1"}}' + - '{"sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test"}' + + - match: { errors: false } + - match: { items.0.index.result: "created" } + + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"doc": { "sparse_field": "updated inference test", "dense_field": "another updated inference test", "non_inference_field": "updated non inference test" }}' + + - match: { errors: false } + - match: { items.0.update.result: "updated" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "updated inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } + - match: { _source.dense_field.text: "another updated 
inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - match: { _source.non_inference_field: "updated non inference test" } + + # Script update not supported + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"script": {"source": {"ctx.sparse_field": "updated inference test"}}}' + + - match: { errors: true } + - match: { items.0.update.status: 400 } + - match: { items.0.update.error.reason: "Cannot apply update with a script on indices that contain [semantic_text] field(s)" } + +--- +"Calculates text expansion results for bulk updates - upsert": + # Initial update fails + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"doc": { "sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test" }}' + + - match: { errors: true } + - match: { items.0.update.status: 404 } + + # Update as upsert + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - '{"doc": { "sparse_field": "inference test", "dense_field": "another inference test", "non_inference_field": "non inference test" }, "doc_as_upsert": true}' + + - match: { errors: false } + - match: { items.0.update.result: "created" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + + - do: + bulk: + body: + - '{"update": {"_index": "test-index", "_id": "doc_1"}}' + - 
'{"doc": { "sparse_field": "updated inference test", "dense_field": "another updated inference test", "non_inference_field": "updated non inference test" }, "doc_as_upsert": true}' + + - match: { errors: false } + - match: { items.0.update.result: "updated" } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "updated inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } + - match: { _source.dense_field.text: "another updated inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - match: { _source.non_inference_field: "updated non inference test" } + + +--- +"Update by query picks up new semantic_text fields": + + - do: + indices.create: + index: mapping-update-index + body: + mappings: + dynamic: false + properties: + non_inference_field: + type: text + + - do: + index: + index: mapping-update-index + id: doc_1 + refresh: true + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + indices.put_mapping: + index: mapping-update-index + body: + properties: + sparse_field: + type: semantic_text + inference_id: sparse-inference-id + dense_field: + type: semantic_text + inference_id: dense-inference-id + non_inference_field: + type: text + + - do: + update_by_query: + wait_for_completion: true + index: mapping-update-index + + - match: { updated: 1 } + + - do: + get: + index: mapping-update-index + id: doc_1 + + - match: { _source.sparse_field.text: "inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.sparse_field.inference.chunks.0.text: "inference test" } + - match: { _source.dense_field.text: "another inference test" } + - exists: 
_source.dense_field.inference.chunks.0.embeddings + - match: { _source.dense_field.inference.chunks.0.text: "another inference test" } + - match: { _source.non_inference_field: "non inference test" } + +--- +"Update by query works for scripts": + + - do: + index: + index: test-index + id: doc_1 + refresh: true + body: + sparse_field: "inference test" + dense_field: "another inference test" + non_inference_field: "non inference test" + + - do: + update_by_query: + wait_for_completion: true + index: test-index + body: { "script": "ctx._source.sparse_field = 'updated inference test'; ctx._source.dense_field = 'another updated inference test'" } + + - match: { updated: 1 } + + - do: + get: + index: test-index + id: doc_1 + + - match: { _source.sparse_field.text: "updated inference test" } + - match: { _source.sparse_field.inference.chunks.0.text: "updated inference test" } + - exists: _source.sparse_field.inference.chunks.0.embeddings + - match: { _source.dense_field.text: "another updated inference test" } + - match: { _source.dense_field.inference.chunks.0.text: "another updated inference test" } + - exists: _source.dense_field.inference.chunks.0.embeddings diff --git a/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java b/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java index 4d90d2a18685..058b64894f8b 100644 --- a/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java +++ b/x-pack/plugin/ml/qa/ml-inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CoordinatedInferenceIngestIT.java @@ -59,10 +59,10 @@ public void testIngestWithMultipleModelTypes() throws IOException { assertThat(simulatedDocs, hasSize(2)); assertEquals(inferenceServiceModelId, 
MapHelper.dig("doc._source.ml.model_id", simulatedDocs.get(0))); var sparseEmbedding = (Map) MapHelper.dig("doc._source.ml.body", simulatedDocs.get(0)); - assertEquals(Double.valueOf(1.0), sparseEmbedding.get("1")); + assertNotNull(sparseEmbedding.get("feature_1")); assertEquals(inferenceServiceModelId, MapHelper.dig("doc._source.ml.model_id", simulatedDocs.get(1))); sparseEmbedding = (Map) MapHelper.dig("doc._source.ml.body", simulatedDocs.get(1)); - assertEquals(Double.valueOf(1.0), sparseEmbedding.get("1")); + assertNotNull(sparseEmbedding.get("feature_1")); } { From c22ec19427e640342dc246251c5161bc79124d2a Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 13 May 2024 10:19:41 +0100 Subject: [PATCH 076/119] Introduce `assertNoSuccessListener` utility (#108547) Similar to `assertNoFailureListener`, sometimes we need a listener which asserts it is not completed successfully. --- .../JoinValidationServiceTests.java | 15 ++----- .../cluster/service/MasterServiceTests.java | 39 ++++++++----------- .../action/support/ActionTestUtils.java | 24 ++++++++++++ .../support/CancellableActionTestPlugin.java | 20 +++------- .../ml/dataframe/DestinationIndexTests.java | 9 ++--- 5 files changed, 53 insertions(+), 54 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java index 6df9260b2bcc..79203899b665 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java @@ -60,6 +60,7 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import static org.elasticsearch.action.support.ActionTestUtils.assertNoSuccessListener; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -295,17 +296,9 @@ protected 
void onSendRequest(long requestId, String action, TransportRequest req assertSame(node, joiningNode); assertEquals(JoinValidationService.JOIN_VALIDATE_ACTION_NAME, action); - final var listener = new ActionListener() { - @Override - public void onResponse(TransportResponse transportResponse) { - fail("should not succeed"); - } - - @Override - public void onFailure(Exception e) { - handleError(requestId, new RemoteTransportException(node.getName(), node.getAddress(), action, e)); - } - }; + final ActionListener listener = assertNoSuccessListener( + e -> handleError(requestId, new RemoteTransportException(node.getName(), node.getAddress(), action, e)) + ); try (var ignored = NamedWriteableRegistryTests.ignoringUnknownNamedWriteables(); var out = new BytesStreamOutput()) { request.writeTo(out); diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index 50030143ec35..617e1cb09c35 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -80,6 +80,7 @@ import java.util.stream.Collectors; import static java.util.Collections.emptySet; +import static org.elasticsearch.action.support.ActionTestUtils.assertNoSuccessListener; import static org.elasticsearch.cluster.service.MasterService.MAX_TASK_DESCRIPTION_CHARS; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; @@ -1041,30 +1042,22 @@ public void onFailure(Exception e) { threadContext.putHeader(testContextHeaderName, testContextHeaderValue); final var expectFailure = randomBoolean(); final var taskComplete = new AtomicBoolean(); - final var task = new Task(expectFailure, testResponseHeaderValue, new ActionListener<>() { - @Override - public void onResponse(ClusterState clusterState) { - throw new AssertionError("should not succeed"); + final var 
task = new Task(expectFailure, testResponseHeaderValue, assertNoSuccessListener(e -> { + assertEquals(testContextHeaderValue, threadContext.getHeader(testContextHeaderName)); + assertEquals(List.of(testResponseHeaderValue), threadContext.getResponseHeaders().get(testResponseHeaderName)); + assertThat(e, instanceOf(FailedToCommitClusterStateException.class)); + assertThat(e.getMessage(), equalTo(publicationFailedExceptionMessage)); + if (expectFailure) { + assertThat(e.getSuppressed().length, greaterThan(0)); + var suppressed = e.getSuppressed()[0]; + assertThat(suppressed, instanceOf(ElasticsearchException.class)); + assertThat(suppressed.getMessage(), equalTo(taskFailedExceptionMessage)); } - - @Override - public void onFailure(Exception e) { - assertEquals(testContextHeaderValue, threadContext.getHeader(testContextHeaderName)); - assertEquals(List.of(testResponseHeaderValue), threadContext.getResponseHeaders().get(testResponseHeaderName)); - assertThat(e, instanceOf(FailedToCommitClusterStateException.class)); - assertThat(e.getMessage(), equalTo(publicationFailedExceptionMessage)); - if (expectFailure) { - assertThat(e.getSuppressed().length, greaterThan(0)); - var suppressed = e.getSuppressed()[0]; - assertThat(suppressed, instanceOf(ElasticsearchException.class)); - assertThat(suppressed.getMessage(), equalTo(taskFailedExceptionMessage)); - } - assertNotNull(publishedState.get()); - assertNotSame(stateBeforeFailure, publishedState.get()); - assertTrue(taskComplete.compareAndSet(false, true)); - publishFailureCountdown.countDown(); - } - }); + assertNotNull(publishedState.get()); + assertNotSame(stateBeforeFailure, publishedState.get()); + assertTrue(taskComplete.compareAndSet(false, true)); + publishFailureCountdown.countDown(); + })); queue.submitTask("test", task, null); } diff --git a/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java b/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java index 
187a8b6e4eab..023305101f4c 100644 --- a/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/action/support/ActionTestUtils.java @@ -22,6 +22,9 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; + +import static org.elasticsearch.test.ESTestCase.fail; public class ActionTestUtils { @@ -77,6 +80,27 @@ public static ActionListener assertNoFailureListener(CheckedConsumer ActionListener assertNoSuccessListener(Consumer consumer) { + return new ActionListener<>() { + @Override + public void onResponse(T result) { + fail(null, "unexpected success with result [%s] while expecting to handle failure with [%s]", result, consumer); + } + + @Override + public void onFailure(Exception e) { + try { + consumer.accept(e); + } catch (Exception e2) { + if (e2 != e) { + e2.addSuppressed(e); + } + fail(e2, "unexpected failure in onFailure handler for [%s]", consumer); + } + } + }; + } + public static ResponseListener wrapAsRestResponseListener(ActionListener listener) { return new ResponseListener() { @Override diff --git a/test/framework/src/main/java/org/elasticsearch/action/support/CancellableActionTestPlugin.java b/test/framework/src/main/java/org/elasticsearch/action/support/CancellableActionTestPlugin.java index 115ea63fb243..dad0e3b613ef 100644 --- a/test/framework/src/main/java/org/elasticsearch/action/support/CancellableActionTestPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/action/support/CancellableActionTestPlugin.java @@ -26,6 +26,7 @@ import static org.elasticsearch.ExceptionsHelper.unwrapCause; import static org.elasticsearch.action.support.ActionTestUtils.assertNoFailureListener; +import static org.elasticsearch.action.support.ActionTestUtils.assertNoSuccessListener; import static org.elasticsearch.test.ESIntegTestCase.internalCluster; import static org.elasticsearch.test.ESTestCase.asInstanceOf; import static 
org.elasticsearch.test.ESTestCase.randomInt; @@ -37,7 +38,6 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; /** * Utility plugin that captures the invocation of an action on a node after the task has been registered with the {@link TaskManager}, @@ -128,19 +128,11 @@ public void app if (capturingListener != null) { final var cancellableTask = asInstanceOf(CancellableTask.class, task); capturingListener.addListener(assertNoFailureListener(captured -> { - cancellableTask.addListener(() -> chain.proceed(task, action, request, new ActionListener<>() { - @Override - public void onResponse(Response response) { - fail("cancelled action should not succeed, but got " + response); - } - - @Override - public void onFailure(Exception e) { - assertThat(unwrapCause(e), instanceOf(TaskCancelledException.class)); - listener.onFailure(e); - captured.countDownLatch().countDown(); - } - })); + cancellableTask.addListener(() -> chain.proceed(task, action, request, assertNoSuccessListener(e -> { + assertThat(unwrapCause(e), instanceOf(TaskCancelledException.class)); + listener.onFailure(e); + captured.countDownLatch().countDown(); + }))); assertFalse(cancellableTask.isCancelled()); captured.doCancel().run(); })); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java index 2f3ccaa313b0..f0f7dec448d9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/DestinationIndexTests.java @@ -61,6 +61,7 @@ import java.util.Map; import static java.util.Collections.singletonMap; +import static org.elasticsearch.action.support.ActionTestUtils.assertNoSuccessListener; import static 
org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue; import static org.elasticsearch.xpack.ml.DefaultMachineLearningExtension.ANALYTICS_DEST_INDEX_ALLOWED_SETTINGS; import static org.hamcrest.Matchers.arrayContaining; @@ -334,10 +335,7 @@ private Map testCreateDestinationIndex(DataFrameAnalysis analysi clock, config, ANALYTICS_DEST_INDEX_ALLOWED_SETTINGS, - ActionListener.wrap( - response -> fail("should not succeed"), - e -> assertThat(e.getMessage(), Matchers.matchesRegex(finalErrorMessage)) - ) + assertNoSuccessListener(e -> assertThat(e.getMessage(), Matchers.matchesRegex(finalErrorMessage))) ); return null; @@ -578,8 +576,7 @@ public void testCreateDestinationIndex_ResultsFieldsExistsInSourceIndex() { clock, config, ANALYTICS_DEST_INDEX_ALLOWED_SETTINGS, - ActionListener.wrap( - response -> fail("should not succeed"), + assertNoSuccessListener( e -> assertThat( e.getMessage(), equalTo("A field that matches the dest.results_field [ml] already exists; please set a different results_field") From 69ce9ed8c602a6df18295210866ea90e30c83b56 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Mon, 13 May 2024 10:43:46 +0100 Subject: [PATCH 077/119] Collapse transport versions into 8.13 release version (#107899) --- .../elasticsearch/ElasticsearchException.java | 14 +- .../org/elasticsearch/TransportVersions.java | 38 +--- .../node/hotthreads/NodeHotThreads.java | 4 +- .../NodesReloadSecureSettingsRequest.java | 4 +- .../node/stats/TransportNodesStatsAction.java | 7 +- .../indices/create/CreateIndexRequest.java | 4 +- .../resolve/ResolveClusterActionRequest.java | 12 +- .../resolve/ResolveClusterActionResponse.java | 6 +- .../indices/resolve/ResolveClusterInfo.java | 6 +- .../indices/rollover/RolloverRequest.java | 4 +- .../indices/rollover/RolloverResponse.java | 7 +- .../FieldCapabilitiesNodeRequest.java | 4 +- .../fieldcaps/FieldCapabilitiesRequest.java | 4 +- .../action/index/IndexRequest.java | 15 +- .../cluster/SnapshotsInProgress.java | 10 +- 
.../ClusterFormationFailureHelper.java | 4 +- .../cluster/metadata/DataStream.java | 4 +- .../cluster/metadata/DesiredNode.java | 4 +- .../elasticsearch/health/node/HealthInfo.java | 6 +- .../node/UpdateHealthInfoCacheAction.java | 8 +- .../histogram/InternalDateHistogram.java | 6 +- .../search/vectors/ExactKnnQueryBuilder.java | 2 +- .../vectors/KnnScoreDocQueryBuilder.java | 4 +- .../search/vectors/KnnVectorQueryBuilder.java | 4 +- .../snapshots/SnapshotsService.java | 2 +- .../TransportResolveClusterActionTests.java | 6 +- .../core/async/GetAsyncStatusRequest.java | 4 +- .../inference/action/InferenceAction.java | 17 +- .../ml/action/CoordinatedInferenceAction.java | 6 +- .../InferTrainedModelDeploymentAction.java | 4 +- .../action/GetTransformStatsAction.java | 4 +- .../action/InferenceActionRequestTests.java | 167 +----------------- ...oordinatedInferenceActionRequestTests.java | 11 +- .../compute/lucene/LuceneOperator.java | 4 +- .../xpack/esql/action/EsqlQueryResponse.java | 4 +- .../xpack/esql/io/stream/PlanNamedTypes.java | 24 +-- .../xpack/esql/plugin/DataNodeRequest.java | 4 +- .../cohere/CohereServiceSettings.java | 2 +- .../CohereEmbeddingsServiceSettings.java | 2 +- .../CohereEmbeddingsTaskSettings.java | 2 +- .../CustomElandInternalServiceSettings.java | 5 +- .../ElasticsearchInternalServiceSettings.java | 5 +- ...lingualE5SmallInternalServiceSettings.java | 8 - .../OpenAiEmbeddingsServiceSettings.java | 11 +- .../ml/queries/TextExpansionQueryBuilder.java | 4 +- .../queries/WeightedTokensQueryBuilder.java | 2 +- .../shutdown/DeleteShutdownNodeAction.java | 4 +- .../xpack/shutdown/PutShutdownNodeAction.java | 4 +- 48 files changed, 130 insertions(+), 357 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 31768ab85474..2983a2d62de7 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ 
b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -1902,18 +1902,8 @@ private enum ElasticsearchExceptionHandle { 175, TransportVersions.V_8_12_0 ), - SEARCH_TIMEOUT_EXCEPTION( - SearchTimeoutException.class, - SearchTimeoutException::new, - 176, - TransportVersions.SEARCH_TIMEOUT_EXCEPTION_ADDED - ), - INGEST_GRAPH_STRUCTURE_EXCEPTION( - GraphStructureException.class, - GraphStructureException::new, - 177, - TransportVersions.INGEST_GRAPH_STRUCTURE_EXCEPTION - ), + SEARCH_TIMEOUT_EXCEPTION(SearchTimeoutException.class, SearchTimeoutException::new, 176, TransportVersions.V_8_13_0), + INGEST_GRAPH_STRUCTURE_EXCEPTION(GraphStructureException.class, GraphStructureException::new, 177, TransportVersions.V_8_13_0), FAILURE_INDEX_NOT_SUPPORTED_EXCEPTION( FailureIndexNotSupportedException.class, FailureIndexNotSupportedException::new, diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 9589885d6df3..95961546f1e1 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -97,43 +97,9 @@ static TransportVersion def(int id) { public static final TransportVersion V_8_10_X = def(8_500_061); public static final TransportVersion V_8_11_X = def(8_512_00_1); public static final TransportVersion V_8_12_0 = def(8_560_00_0); - public static final TransportVersion DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ_8_12_PATCH = def(8_560_00_1); - public static final TransportVersion NODE_STATS_REQUEST_SIMPLIFIED = def(8_561_00_0); - public static final TransportVersion TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED = def(8_562_00_0); - public static final TransportVersion ESQL_ASYNC_QUERY = def(8_563_00_0); - public static final TransportVersion ESQL_STATUS_INCLUDE_LUCENE_QUERIES = def(8_564_00_0); - public static final TransportVersion ESQL_CLUSTER_ALIAS = def(8_565_00_0); - public static final 
TransportVersion SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED = def(8_566_00_0); - public static final TransportVersion SMALLER_RELOAD_SECURE_SETTINGS_REQUEST = def(8_567_00_0); - public static final TransportVersion UPDATE_API_KEY_EXPIRATION_TIME_ADDED = def(8_568_00_0); - public static final TransportVersion LAZY_ROLLOVER_ADDED = def(8_569_00_0); - public static final TransportVersion ESQL_PLAN_POINT_LITERAL_WKB = def(8_570_00_0); - public static final TransportVersion HOT_THREADS_AS_BYTES = def(8_571_00_0); - public static final TransportVersion ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED = def(8_572_00_0); - public static final TransportVersion ESQL_ENRICH_POLICY_CCQ_MODE = def(8_573_00_0); - public static final TransportVersion DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ = def(8_574_00_0); - public static final TransportVersion PEERFINDER_REPORTS_PEERS_MASTERS = def(8_575_00_0); - public static final TransportVersion ESQL_MULTI_CLUSTERS_ENRICH = def(8_576_00_0); - public static final TransportVersion NESTED_KNN_MORE_INNER_HITS = def(8_577_00_0); - public static final TransportVersion REQUIRE_DATA_STREAM_ADDED = def(8_578_00_0); - public static final TransportVersion ML_INFERENCE_COHERE_EMBEDDINGS_ADDED = def(8_579_00_0); - public static final TransportVersion DESIRED_NODE_VERSION_OPTIONAL_STRING = def(8_580_00_0); - public static final TransportVersion ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED = def(8_581_00_0); - public static final TransportVersion ASYNC_SEARCH_STATUS_SUPPORTS_KEEP_ALIVE = def(8_582_00_0); - public static final TransportVersion KNN_QUERY_NUMCANDS_AS_OPTIONAL_PARAM = def(8_583_00_0); - public static final TransportVersion TRANSFORM_GET_BASIC_STATS = def(8_584_00_0); - public static final TransportVersion NLP_DOCUMENT_CHUNKING_ADDED = def(8_585_00_0); - public static final TransportVersion SEARCH_TIMEOUT_EXCEPTION_ADDED = def(8_586_00_0); - public static final TransportVersion ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED = def(8_587_00_0); - 
public static final TransportVersion HEALTH_INFO_ENRICHED_WITH_REPOS = def(8_588_00_0); - public static final TransportVersion RESOLVE_CLUSTER_ENDPOINT_ADDED = def(8_589_00_0); - public static final TransportVersion FIELD_CAPS_FIELD_HAS_VALUE = def(8_590_00_0); - public static final TransportVersion ML_INFERENCE_REQUEST_INPUT_TYPE_CLASS_CLUSTER_ADDED = def(8_591_00_0); - public static final TransportVersion ML_DIMENSIONS_SET_BY_USER_ADDED = def(8_592_00_0); - public static final TransportVersion INDEX_REQUEST_NORMALIZED_BYTES_PARSED = def(8_593_00_0); - public static final TransportVersion INGEST_GRAPH_STRUCTURE_EXCEPTION = def(8_594_00_0); + public static final TransportVersion V_8_12_1 = def(8_560_00_1); public static final TransportVersion V_8_13_0 = def(8_595_00_0); - public static final TransportVersion SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13 = def(8_595_00_1); + public static final TransportVersion V_8_13_4 = def(8_595_00_1); // 8.14.0+ public static final TransportVersion RANDOM_AGG_SHARD_SEED = def(8_596_00_0); public static final TransportVersion ESQL_TIMINGS = def(8_597_00_0); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java index 1118a6318ddf..ef5d7f5e74ef 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodeHotThreads.java @@ -27,7 +27,7 @@ public class NodeHotThreads extends BaseNodeResponse { NodeHotThreads(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.HOT_THREADS_AS_BYTES)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { bytes = in.readReleasableBytesReference(); } else { bytes = ReleasableBytesReference.wrap(new BytesArray(in.readString().getBytes(StandardCharsets.UTF_8))); 
@@ -56,7 +56,7 @@ public java.io.Reader getHotThreadsReader() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.HOT_THREADS_AS_BYTES)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBytesReference(bytes); } else { out.writeString(bytes.utf8ToString()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java index c24833dca49e..a83a09af642f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java @@ -97,7 +97,7 @@ public static class NodeRequest extends TransportRequest { NodeRequest(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().before(TransportVersions.SMALLER_RELOAD_SECURE_SETTINGS_REQUEST)) { + if (in.getTransportVersion().before(TransportVersions.V_8_13_0)) { TaskId.readFromStream(in); in.readStringArray(); in.readOptionalArray(DiscoveryNode::new, DiscoveryNode[]::new); @@ -131,7 +131,7 @@ public void writeTo(StreamOutput out) throws IOException { assert hasReferences(); super.writeTo(out); - if (out.getTransportVersion().before(TransportVersions.SMALLER_RELOAD_SECURE_SETTINGS_REQUEST)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { TaskId.EMPTY_TASK_ID.writeTo(out); out.writeStringArray(Strings.EMPTY_ARRAY); out.writeOptionalArray(StreamOutput::writeWriteable, null); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java index 9c7cbc0ec193..4829f309e1f3 
100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/TransportNodesStatsAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.cluster.node.stats; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; @@ -38,8 +39,6 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.TransportVersions.NODE_STATS_REQUEST_SIMPLIFIED; - public class TransportNodesStatsAction extends TransportNodesAction< NodesStatsRequest, NodesStatsResponse, @@ -158,7 +157,7 @@ public static class NodeStatsRequest extends TransportRequest { public NodeStatsRequest(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(NODE_STATS_REQUEST_SIMPLIFIED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.nodesStatsRequestParameters = new NodesStatsRequestParameters(in); this.nodesIds = in.readStringArray(); } else { @@ -191,7 +190,7 @@ public String getDescription() { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().onOrAfter(NODE_STATS_REQUEST_SIMPLIFIED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.nodesStatsRequestParameters.writeTo(out); out.writeStringArrayNullable(nodesIds); } else { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 2ec6db339b6e..21f187f05258 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -104,7 +104,7 
@@ public CreateIndexRequest(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { origin = in.readString(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { requireDataStream = in.readBoolean(); } else { requireDataStream = false; @@ -487,7 +487,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_7_12_0)) { out.writeString(origin); } - if (out.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalBoolean(this.requireDataStream); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java index 1649e4587d63..118f13904597 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionRequest.java @@ -62,10 +62,10 @@ public ResolveClusterActionRequest(String[] names, IndicesOptions indicesOptions public ResolveClusterActionRequest(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().before(TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED)) { + if (in.getTransportVersion().before(TransportVersions.V_8_13_0)) { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + in.getTransportVersion().toReleaseVersion() ); @@ -78,10 +78,10 @@ public 
ResolveClusterActionRequest(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().before(TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + out.getTransportVersion().toReleaseVersion() ); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionResponse.java index ee2e3d60dc56..892b34df2b86 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterActionResponse.java @@ -44,10 +44,10 @@ public ResolveClusterActionResponse(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().before(TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + out.getTransportVersion().toReleaseVersion() ); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterInfo.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterInfo.java 
index 578b4ae547a0..dc2416a1b1ba 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterInfo.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveClusterInfo.java @@ -65,10 +65,10 @@ public ResolveClusterInfo(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().before(TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + out.getTransportVersion().toReleaseVersion() ); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java index 28ef2f644af0..1f582f95aea9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java @@ -110,7 +110,7 @@ public RolloverRequest(StreamInput in) throws IOException { dryRun = in.readBoolean(); conditions = new RolloverConditions(in); createIndexRequest = new CreateIndexRequest(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { lazy = in.readBoolean(); } else { lazy = false; @@ -165,7 +165,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(dryRun); conditions.writeTo(out); createIndexRequest.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED)) { + if 
(out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(lazy); } if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_ROLLOVER)) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponse.java index 360ea59e6a29..04b9f6498a3a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverResponse.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.indices.rollover; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.support.master.ShardsAcknowledgedResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -20,8 +21,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.TransportVersions.LAZY_ROLLOVER_ADDED; - /** * Response object for {@link RolloverRequest} API * @@ -59,7 +58,7 @@ public final class RolloverResponse extends ShardsAcknowledgedResponse implement dryRun = in.readBoolean(); rolledOver = in.readBoolean(); shardsAcknowledged = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(LAZY_ROLLOVER_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { lazy = in.readBoolean(); } else { lazy = false; @@ -142,7 +141,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(dryRun); out.writeBoolean(rolledOver); out.writeBoolean(shardsAcknowledged); - if (out.getTransportVersion().onOrAfter(LAZY_ROLLOVER_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(lazy); } } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java 
b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java index da56e20f4e6a..6c1734bde401 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java @@ -56,7 +56,7 @@ class FieldCapabilitiesNodeRequest extends ActionRequest implements IndicesReque indexFilter = in.readOptionalNamedWriteable(QueryBuilder.class); nowInMillis = in.readLong(); runtimeFields = in.readGenericMap(); - if (in.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_FIELD_HAS_VALUE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { includeEmptyFields = in.readBoolean(); } else { includeEmptyFields = true; @@ -144,7 +144,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalNamedWriteable(indexFilter); out.writeLong(nowInMillis); out.writeGenericMap(runtimeFields); - if (out.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_FIELD_HAS_VALUE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(includeEmptyFields); } } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java index 4b1c256bdeb7..6fab92219511 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java @@ -63,7 +63,7 @@ public FieldCapabilitiesRequest(StreamInput in) throws IOException { filters = in.readStringArray(); types = in.readStringArray(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_FIELD_HAS_VALUE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { includeEmptyFields = in.readBoolean(); } } @@ -104,7 +104,7 @@ public void 
writeTo(StreamOutput out) throws IOException { out.writeStringArray(filters); out.writeStringArray(types); } - if (out.getTransportVersion().onOrAfter(TransportVersions.FIELD_CAPS_FIELD_HAS_VALUE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(includeEmptyFields); } } diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 9d0eeb20dace..afe918c6853e 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -52,7 +52,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.TransportVersions.INDEX_REQUEST_NORMALIZED_BYTES_PARSED; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; @@ -189,7 +188,7 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio dynamicTemplates = in.readMap(StreamInput::readString); } if (in.getTransportVersion().onOrAfter(PIPELINES_HAVE_RUN_FIELD_ADDED) - && in.getTransportVersion().before(INDEX_REQUEST_NORMALIZED_BYTES_PARSED)) { + && in.getTransportVersion().before(TransportVersions.V_8_13_0)) { in.readBoolean(); } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { @@ -201,14 +200,12 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio : new ArrayList<>(possiblyImmutableExecutedPipelines); } } - if (in.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { requireDataStream = in.readBoolean(); + normalisedBytesParsed = in.readZLong(); } else { requireDataStream = false; } - if 
(in.getTransportVersion().onOrAfter(INDEX_REQUEST_NORMALIZED_BYTES_PARSED)) { - normalisedBytesParsed = in.readZLong(); - } } public IndexRequest() { @@ -772,7 +769,7 @@ private void writeBody(StreamOutput out) throws IOException { } } if (out.getTransportVersion().onOrAfter(PIPELINES_HAVE_RUN_FIELD_ADDED) - && out.getTransportVersion().before(INDEX_REQUEST_NORMALIZED_BYTES_PARSED)) { + && out.getTransportVersion().before(TransportVersions.V_8_13_0)) { out.writeBoolean(normalisedBytesParsed != -1L); } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { @@ -782,10 +779,8 @@ private void writeBody(StreamOutput out) throws IOException { } } - if (out.getTransportVersion().onOrAfter(TransportVersions.REQUIRE_DATA_STREAM_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(requireDataStream); - } - if (out.getTransportVersion().onOrAfter(INDEX_REQUEST_NORMALIZED_BYTES_PARSED)) { out.writeZLong(normalisedBytesParsed); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java index 1baa287830c7..532a33d07b25 100644 --- a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java @@ -53,8 +53,6 @@ import java.util.Set; import java.util.stream.Stream; -import static org.elasticsearch.TransportVersions.SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED; - /** * Meta data about snapshots that are currently executing */ @@ -93,7 +91,7 @@ public SnapshotsInProgress(StreamInput in) throws IOException { } private static Set readNodeIdsForRemoval(StreamInput in) throws IOException { - return in.getTransportVersion().onOrAfter(SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED) + return in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? 
in.readCollectionAsImmutableSet(StreamInput::readString) : Set.of(); } @@ -246,7 +244,7 @@ public void writeTo(StreamOutput out) throws IOException { while (iterator.hasNext()) { iterator.next().writeTo(out); } - if (out.getTransportVersion().onOrAfter(SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeStringCollection(nodesIdsForRemoval); } else { assert nodesIdsForRemoval.isEmpty() : nodesIdsForRemoval; @@ -433,7 +431,7 @@ private static boolean assertShardStateConsistent( * running shard snapshots. */ public SnapshotsInProgress withUpdatedNodeIdsForRemoval(ClusterState clusterState) { - assert clusterState.getMinTransportVersion().onOrAfter(TransportVersions.SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED); + assert clusterState.getMinTransportVersion().onOrAfter(TransportVersions.V_8_13_0); final var updatedNodeIdsForRemoval = new HashSet<>(nodesIdsForRemoval); @@ -1709,7 +1707,7 @@ public void writeTo(StreamOutput out) throws IOException { } else { new SimpleDiffable.CompleteDiff<>(after).writeTo(out); } - if (out.getTransportVersion().onOrAfter(SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeStringCollection(nodeIdsForRemoval); } else { assert nodeIdsForRemoval.isEmpty() : nodeIdsForRemoval; diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java index e81d8d73af9a..b46b79754be7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/ClusterFormationFailureHelper.java @@ -241,7 +241,7 @@ public ClusterFormationState(StreamInput in) throws IOException { new VotingConfiguration(in), 
in.readCollectionAsImmutableList(TransportAddress::new), in.readCollectionAsImmutableList(DiscoveryNode::new), - in.getTransportVersion().onOrAfter(TransportVersions.PEERFINDER_REPORTS_PEERS_MASTERS) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? in.readCollectionAsImmutableSet(DiscoveryNode::new) : Set.of(), in.readLong(), @@ -423,7 +423,7 @@ public void writeTo(StreamOutput out) throws IOException { lastCommittedConfiguration.writeTo(out); out.writeCollection(resolvedAddresses); out.writeCollection(foundPeers); - if (out.getTransportVersion().onOrAfter(TransportVersions.PEERFINDER_REPORTS_PEERS_MASTERS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeCollection(mastersOfPeers); } out.writeLong(currentTerm); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 16ad072f271f..e4b47b8d26c6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -978,7 +978,7 @@ public DataStream(StreamInput in) throws IOException { in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) ? in.readOptionalWriteable(DataStreamLifecycle::new) : null, in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) ? in.readBoolean() : false, in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) ? readIndices(in) : List.of(), - in.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED) ? in.readBoolean() : false, + in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? in.readBoolean() : false, in.getTransportVersion().onOrAfter(DataStream.ADDED_AUTO_SHARDING_EVENT_VERSION) ? 
in.readOptionalWriteable(DataStreamAutoShardingEvent::new) : null @@ -1022,7 +1022,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(failureStoreEnabled); out.writeCollection(failureIndices); } - if (out.getTransportVersion().onOrAfter(TransportVersions.LAZY_ROLLOVER_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(rolloverOnWrite); } if (out.getTransportVersion().onOrAfter(DataStream.ADDED_AUTO_SHARDING_EVENT_VERSION)) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java index 1ce950cf71f5..403b4b85e664 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNode.java @@ -204,7 +204,7 @@ public static DesiredNode readFrom(StreamInput in) throws IOException { final var memory = ByteSizeValue.readFrom(in); final var storage = ByteSizeValue.readFrom(in); final String version; - if (in.getTransportVersion().onOrAfter(TransportVersions.DESIRED_NODE_VERSION_OPTIONAL_STRING)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { version = in.readOptionalString(); } else { version = Version.readVersion(in).toString(); @@ -237,7 +237,7 @@ public void writeTo(StreamOutput out) throws IOException { } memory.writeTo(out); storage.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.DESIRED_NODE_VERSION_OPTIONAL_STRING)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalString(version); } else { Version parsedVersion = parseLegacyVersion(version); diff --git a/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java b/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java index 697c5eff939f..941e034a83de 100644 --- a/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java +++ 
b/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java @@ -39,9 +39,7 @@ public HealthInfo(StreamInput input) throws IOException { input.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0) ? input.readOptionalWriteable(DataStreamLifecycleHealthInfo::new) : null, - input.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS) - ? input.readMap(RepositoriesHealthInfo::new) - : Map.of() + input.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? input.readMap(RepositoriesHealthInfo::new) : Map.of() ); } @@ -51,7 +49,7 @@ public void writeTo(StreamOutput output) throws IOException { if (output.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { output.writeOptionalWriteable(dslHealthInfo); } - if (output.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS)) { + if (output.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { output.writeMap(repositoriesInfoByNode, StreamOutput::writeWriteable); } } diff --git a/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java b/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java index f780de078527..b0dc5958c7ed 100644 --- a/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java +++ b/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java @@ -71,7 +71,7 @@ public Request(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { this.diskHealthInfo = in.readOptionalWriteable(DiskHealthInfo::new); this.dslHealthInfo = in.readOptionalWriteable(DataStreamLifecycleHealthInfo::new); - this.repositoriesHealthInfo = in.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS) + this.repositoriesHealthInfo = in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? 
in.readOptionalWriteable(RepositoriesHealthInfo::new) : null; } else { @@ -113,13 +113,13 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { out.writeOptionalWriteable(diskHealthInfo); out.writeOptionalWriteable(dslHealthInfo); - if (out.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalWriteable(repositoriesHealthInfo); } } else { // BWC for pre-8.12 the disk health info was mandatory. Evolving this request has proven tricky however we've made use of - // waiting for all nodes to be on the {@link TransportVersions.HEALTH_INFO_ENRICHED_WITH_DSL_STATUS} transport version - // before sending any requests to update the health info that'd break the pre HEALTH_INFO_ENRICHED_WITH_DSL_STATUS + // waiting for all nodes to be on the {@link TransportVersions.V_8_12_0} transport version + // before sending any requests to update the health info that'd break the pre-8.12 // transport invariant of always having a disk health information in the request diskHealthInfo.writeTo(out); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index f29850a306b7..e75b2d2002b0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -236,11 +236,7 @@ public int hashCode() { } boolean versionSupportsDownsamplingTimezone(TransportVersion version) { - return version.onOrAfter(TransportVersions.DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ) - || version.between( - TransportVersions.DATE_HISTOGRAM_SUPPORT_DOWNSAMPLED_TZ_8_12_PATCH, - 
TransportVersions.NODE_STATS_REQUEST_SIMPLIFIED - ); + return version.onOrAfter(TransportVersions.V_8_13_0) || version.isPatchFrom(TransportVersions.V_8_12_1); } /** diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java index 60b0d259961d..1f05b215699b 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ExactKnnQueryBuilder.java @@ -128,6 +128,6 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.NESTED_KNN_MORE_INNER_HITS; + return TransportVersions.V_8_13_0; } } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java index 6de6338b604e..65f8c60297ad 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilder.java @@ -63,7 +63,7 @@ public KnnScoreDocQueryBuilder(ScoreDoc[] scoreDocs, String fieldName, VectorDat public KnnScoreDocQueryBuilder(StreamInput in) throws IOException { super(in); this.scoreDocs = in.readArray(Lucene::readScoreDoc, ScoreDoc[]::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.NESTED_KNN_MORE_INNER_HITS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.fieldName = in.readOptionalString(); if (in.readBoolean()) { if (in.getTransportVersion().onOrAfter(TransportVersions.KNN_EXPLICIT_BYTE_QUERY_VECTOR_PARSING)) { @@ -100,7 +100,7 @@ VectorData queryVector() { @Override protected void doWriteTo(StreamOutput out) throws IOException { out.writeArray(Lucene::writeScoreDoc, scoreDocs); - if 
(out.getTransportVersion().onOrAfter(TransportVersions.NESTED_KNN_MORE_INNER_HITS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalString(fieldName); if (queryVector != null) { out.writeBoolean(true); diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java index aa5daa532cf4..0c8dfc9a9833 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnVectorQueryBuilder.java @@ -167,7 +167,7 @@ private KnnVectorQueryBuilder( public KnnVectorQueryBuilder(StreamInput in) throws IOException { super(in); this.fieldName = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.KNN_QUERY_NUMCANDS_AS_OPTIONAL_PARAM)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.numCands = in.readOptionalVInt(); } else { this.numCands = in.readVInt(); @@ -245,7 +245,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { throw new IllegalStateException("missing a rewriteAndFetch?"); } out.writeString(fieldName); - if (out.getTransportVersion().onOrAfter(TransportVersions.KNN_QUERY_NUMCANDS_AS_OPTIONAL_PARAM)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalVInt(numCands); } else { if (numCands == null) { diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index d505a6ded480..dd8ddcffd5fe 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -4126,7 +4126,7 @@ static ClusterState executeBatch( } private static boolean supportsNodeRemovalTracking(ClusterState clusterState) { - return 
clusterState.getMinTransportVersion().onOrAfter(TransportVersions.SNAPSHOTS_IN_PROGRESS_TRACKING_REMOVING_NODES_ADDED); + return clusterState.getMinTransportVersion().onOrAfter(TransportVersions.V_8_13_0); } private final MasterServiceTaskQueue updateNodeIdsToRemoveQueue; diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java index 2a64fbad9757..d76bfc03e1d7 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/resolve/TransportResolveClusterActionTests.java @@ -67,8 +67,8 @@ public void testCCSCompatibilityCheck() { @Override public void writeTo(StreamOutput out) throws IOException { throw new UnsupportedOperationException( - "ResolveClusterAction requires at least Transport Version " - + TransportVersions.RESOLVE_CLUSTER_ENDPOINT_ADDED.toReleaseVersion() + "ResolveClusterAction requires at least version " + + TransportVersions.V_8_13_0.toReleaseVersion() + " but was " + out.getTransportVersion().toReleaseVersion() ); @@ -99,7 +99,7 @@ public void writeTo(StreamOutput out) throws IOException { assertThat(ex.getMessage(), containsString("not compatible with version")); assertThat(ex.getMessage(), containsString("and the 'search.check_ccs_compatibility' setting is enabled.")); - assertThat(ex.getCause().getMessage(), containsString("ResolveClusterAction requires at least Transport Version")); + assertThat(ex.getCause().getMessage(), containsString("ResolveClusterAction requires at least version")); } finally { assertTrue(ESTestCase.terminate(threadPool)); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/GetAsyncStatusRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/GetAsyncStatusRequest.java 
index 9a623ede96f0..3581b9db1988 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/GetAsyncStatusRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/GetAsyncStatusRequest.java @@ -34,7 +34,7 @@ public GetAsyncStatusRequest(String id) { public GetAsyncStatusRequest(StreamInput in) throws IOException { super(in); this.id = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ASYNC_SEARCH_STATUS_SUPPORTS_KEEP_ALIVE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.keepAlive = in.readTimeValue(); } } @@ -43,7 +43,7 @@ public GetAsyncStatusRequest(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(id); - if (out.getTransportVersion().onOrAfter(TransportVersions.ASYNC_SEARCH_STATUS_SUPPORTS_KEEP_ALIVE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeTimeValue(keepAlive); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java index 05fcdadda05a..cfd4da0d59e3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/InferenceAction.java @@ -114,7 +114,7 @@ public Request(StreamInput in) throws IOException { this.input = List.of(in.readString()); } this.taskSettings = in.readGenericMap(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.inputType = in.readEnum(InputType.class); } else { this.inputType = InputType.UNSPECIFIED; @@ -187,9 +187,8 @@ public void writeTo(StreamOutput out) throws IOException { 
out.writeString(input.get(0)); } out.writeGenericMap(taskSettings); - // in version ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED the input type enum was added, so we only want to write the enum if we're - // at that version or later - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeEnum(getInputTypeToWrite(inputType, out.getTransportVersion())); } @@ -204,13 +203,13 @@ public void writeTo(StreamOutput out) throws IOException { // default for easier testing static InputType getInputTypeToWrite(InputType inputType, TransportVersion version) { - if (version.before(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED) - && validEnumsBeforeUnspecifiedAdded.contains(inputType) == false) { - return InputType.INGEST; - } else if (version.before(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_CLASS_CLUSTER_ADDED) - && validEnumsBeforeClassificationClusteringAdded.contains(inputType) == false) { + if (version.before(TransportVersions.V_8_13_0)) { + if (validEnumsBeforeUnspecifiedAdded.contains(inputType) == false) { + return InputType.INGEST; + } else if (validEnumsBeforeClassificationClusteringAdded.contains(inputType) == false) { return InputType.UNSPECIFIED; } + } return inputType; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java index 00064138f036..2984c203ded3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceAction.java @@ -133,10 +133,10 @@ public Request(StreamInput in) throws IOException { this.previouslyLicensed = in.readOptionalBoolean(); this.inferenceTimeout = 
in.readOptionalTimeValue(); this.highPriority = in.readBoolean(); - // The prefixType was added prior to TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED but we're serializing it now + // The prefixType was added prior to TransportVersions.V_8_13_0 but we're serializing it now // as a safety measure. At the time of writing this it doesn't have to be serialized because this class is only used internally // and on a single node so it never actually gets serialized. But we'll do it just in case that changes in the future. - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.prefixType = in.readEnum(TrainedModelPrefixStrings.PrefixType.class); } } @@ -209,7 +209,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalBoolean(previouslyLicensed); out.writeOptionalTimeValue(inferenceTimeout); out.writeBoolean(highPriority); - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeEnum(prefixType); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java index 0be6e152d907..eb41ff4ce870 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/InferTrainedModelDeploymentAction.java @@ -138,7 +138,7 @@ public Request(StreamInput in) throws IOException { } else { prefixType = TrainedModelPrefixStrings.PrefixType.NONE; } - if (in.getTransportVersion().onOrAfter(TransportVersions.NLP_DOCUMENT_CHUNKING_ADDED)) { + if 
(in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { chunkResults = in.readBoolean(); } else { chunkResults = false; @@ -232,7 +232,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_12_0)) { out.writeEnum(prefixType); } - if (out.getTransportVersion().onOrAfter(TransportVersions.NLP_DOCUMENT_CHUNKING_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(chunkResults); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java index 0333322d2acc..6fe4427b1065 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformStatsAction.java @@ -77,7 +77,7 @@ public Request(StreamInput in) throws IOException { expandedIds = in.readCollectionAsImmutableList(StreamInput::readString); pageParams = new PageParams(in); allowNoMatch = in.readBoolean(); - if (in.getTransportVersion().onOrAfter(TransportVersions.TRANSFORM_GET_BASIC_STATS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { basic = in.readBoolean(); } else { basic = false; @@ -130,7 +130,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeStringCollection(expandedIds); pageParams.writeTo(out); out.writeBoolean(allowNoMatch); - if (out.getTransportVersion().onOrAfter(TransportVersions.TRANSFORM_GET_BASIC_STATS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeBoolean(basic); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java index cef2d710237c..476167c5db0f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/InferenceActionRequestTests.java @@ -192,7 +192,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque InputType.UNSPECIFIED, InferenceAction.Request.DEFAULT_TIMEOUT ); - } else if (version.before(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + } else if (version.before(TransportVersions.V_8_13_0)) { return new InferenceAction.Request( instance.getTaskType(), instance.getInferenceEntityId(), @@ -202,7 +202,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque InputType.UNSPECIFIED, InferenceAction.Request.DEFAULT_TIMEOUT ); - } else if (version.before(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED) + } else if (version.before(TransportVersions.V_8_13_0) && (instance.getInputType() == InputType.UNSPECIFIED || instance.getInputType() == InputType.CLASSIFICATION || instance.getInputType() == InputType.CLUSTERING)) { @@ -215,7 +215,7 @@ protected InferenceAction.Request mutateInstanceForVersion(InferenceAction.Reque InputType.INGEST, InferenceAction.Request.DEFAULT_TIMEOUT ); - } else if (version.before(TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_CLASS_CLUSTER_ADDED) + } else if (version.before(TransportVersions.V_8_13_0) && (instance.getInputType() == InputType.CLUSTERING || instance.getInputType() == InputType.CLASSIFICATION)) { return new InferenceAction.Request( instance.getTaskType(), @@ -262,138 +262,10 @@ public void testWriteTo_WhenVersionIsOnAfterUnspecifiedAdded() throws IOExceptio InputType.UNSPECIFIED, InferenceAction.Request.DEFAULT_TIMEOUT ), - 
TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_UNSPECIFIED_ADDED + TransportVersions.V_8_13_0 ); } - public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest() throws IOException { - assertBwcSerialization( - new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.UNSPECIFIED, - InferenceAction.Request.DEFAULT_TIMEOUT - ), - TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED - ); - } - - public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_ManualCheck() throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.UNSPECIFIED, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.INGEST)); - } - - public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_WhenClustering_ManualCheck() - throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.CLUSTERING, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.INGEST)); - } - - public void testWriteTo_WhenVersionIsBeforeUnspecifiedAdded_ButAfterInputTypeAdded_ShouldSetToIngest_WhenClassification_ManualCheck() - throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - 
InputType.CLASSIFICATION, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.INGEST)); - } - - public - void - testWriteTo_WhenVersionIsBeforeClusterClassAdded_ButAfterUnspecifiedAdded_ShouldSetToUnspecified_WhenClassification_ManualCheck() - throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.CLASSIFICATION, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.UNSPECIFIED)); - } - - public - void - testWriteTo_WhenVersionIsBeforeClusterClassAdded_ButAfterUnspecifiedAdded_ShouldSetToUnspecified_WhenClustering_ManualCheck() - throws IOException { - var instance = new InferenceAction.Request( - TaskType.TEXT_EMBEDDING, - "model", - null, - List.of(), - Map.of(), - InputType.CLUSTERING, - InferenceAction.Request.DEFAULT_TIMEOUT - ); - - InferenceAction.Request deserializedInstance = copyWriteable( - instance, - getNamedWriteableRegistry(), - instanceReader(), - TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED - ); - - assertThat(deserializedInstance.getInputType(), is(InputType.UNSPECIFIED)); - } - public void testWriteTo_WhenVersionIsBeforeInputTypeAdded_ShouldSetInputTypeToUnspecified() throws IOException { var instance = new InferenceAction.Request( TaskType.TEXT_EMBEDDING, @@ -409,44 +281,21 @@ public void testWriteTo_WhenVersionIsBeforeInputTypeAdded_ShouldSetInputTypeToUn instance, getNamedWriteableRegistry(), instanceReader(), - 
TransportVersions.HOT_THREADS_AS_BYTES + TransportVersions.V_8_12_1 ); assertThat(deserializedInstance.getInputType(), is(InputType.UNSPECIFIED)); } public void testGetInputTypeToWrite_ReturnsIngest_WhenInputTypeIsUnspecified_VersionBeforeUnspecifiedIntroduced() { - assertThat( - getInputTypeToWrite(InputType.UNSPECIFIED, TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED), - is(InputType.INGEST) - ); + assertThat(getInputTypeToWrite(InputType.UNSPECIFIED, TransportVersions.V_8_12_1), is(InputType.INGEST)); } public void testGetInputTypeToWrite_ReturnsIngest_WhenInputTypeIsClassification_VersionBeforeUnspecifiedIntroduced() { - assertThat( - getInputTypeToWrite(InputType.CLASSIFICATION, TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED), - is(InputType.INGEST) - ); + assertThat(getInputTypeToWrite(InputType.CLASSIFICATION, TransportVersions.V_8_12_1), is(InputType.INGEST)); } public void testGetInputTypeToWrite_ReturnsIngest_WhenInputTypeIsClustering_VersionBeforeUnspecifiedIntroduced() { - assertThat( - getInputTypeToWrite(InputType.CLUSTERING, TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED), - is(InputType.INGEST) - ); - } - - public void testGetInputTypeToWrite_ReturnsUnspecified_WhenInputTypeIsClassification_VersionBeforeClusteringClassIntroduced() { - assertThat( - getInputTypeToWrite(InputType.CLUSTERING, TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED), - is(InputType.UNSPECIFIED) - ); - } - - public void testGetInputTypeToWrite_ReturnsUnspecified_WhenInputTypeIsClustering_VersionBeforeClusteringClassIntroduced() { - assertThat( - getInputTypeToWrite(InputType.CLASSIFICATION, TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED), - is(InputType.UNSPECIFIED) - ); + assertThat(getInputTypeToWrite(InputType.CLUSTERING, TransportVersions.V_8_12_1), is(InputType.INGEST)); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java index 9c435bd37b2c..3ab585181547 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CoordinatedInferenceActionRequestTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.core.ml.action; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; @@ -22,23 +23,21 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.TransportVersions.ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED; -import static org.elasticsearch.TransportVersions.UPDATE_API_KEY_EXPIRATION_TIME_ADDED; import static org.hamcrest.Matchers.is; public class CoordinatedInferenceActionRequestTests extends AbstractBWCWireSerializationTestCase { public void testSerializesPrefixType_WhenTransportVersionIs_InputTypeAdded() throws IOException { var instance = createTestInstance(); instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.INGEST); - var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED); - assertOnBWCObject(copy, instance, ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED); + var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), TransportVersions.V_8_13_0); + assertOnBWCObject(copy, instance, TransportVersions.V_8_13_0); assertThat(copy.getPrefixType(), is(TrainedModelPrefixStrings.PrefixType.INGEST)); } public void testSerializesPrefixType_DoesNotSerialize_WhenTransportVersion_IsPriorToInputTypeAdded() throws IOException { var instance = createTestInstance(); 
instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.INGEST); - var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), UPDATE_API_KEY_EXPIRATION_TIME_ADDED); + var copy = copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), TransportVersions.V_8_12_1); assertNotSame(copy, instance); assertNotEquals(copy, instance); @@ -117,7 +116,7 @@ protected CoordinatedInferenceAction.Request mutateInstanceForVersion( CoordinatedInferenceAction.Request instance, TransportVersion version ) { - if (version.before(ML_INFERENCE_REQUEST_INPUT_TYPE_ADDED)) { + if (version.before(TransportVersions.V_8_13_0)) { instance.setPrefixType(TrainedModelPrefixStrings.PrefixType.NONE); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index fae0a86762b9..10c78be15bd8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -295,7 +295,7 @@ private Status(LuceneOperator operator) { Status(StreamInput in) throws IOException { processedSlices = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_STATUS_INCLUDE_LUCENE_QUERIES)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { processedQueries = in.readCollectionAsSet(StreamInput::readString); processedShards = in.readCollectionAsSet(StreamInput::readString); } else { @@ -314,7 +314,7 @@ private Status(LuceneOperator operator) { @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(processedSlices); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_STATUS_INCLUDE_LUCENE_QUERIES)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeCollection(processedQueries, 
StreamOutput::writeString); out.writeCollection(processedShards, StreamOutput::writeString); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index 49a0307a6599..fdf39545a396 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -92,7 +92,7 @@ static EsqlQueryResponse deserialize(BlockStreamInput in) throws IOException { boolean isRunning = false; boolean isAsync = false; Profile profile = null; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ASYNC_QUERY)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { asyncExecutionId = in.readOptionalString(); isRunning = in.readBoolean(); isAsync = in.readBoolean(); @@ -108,7 +108,7 @@ static EsqlQueryResponse deserialize(BlockStreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ASYNC_QUERY)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalString(asyncExecutionId); out.writeBoolean(isRunning); out.writeBoolean(isAsync); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 0cfffb128e0c..4a5748f26a07 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -540,7 +540,7 @@ static EnrichExec readEnrichExec(PlanStreamInput in) throws IOException { final String policyMatchField = in.readString(); final Map concreteIndices; final 
Enrich.Mode mode; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { mode = in.readEnum(Enrich.Mode.class); concreteIndices = in.readMap(StreamInput::readString, StreamInput::readString); } else { @@ -573,7 +573,7 @@ static void writeEnrichExec(PlanStreamOutput out, EnrichExec enrich) throws IOEx out.writeString(enrich.matchType()); } out.writeString(enrich.policyMatchField()); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeEnum(enrich.mode()); out.writeMap(enrich.concreteIndices(), StreamOutput::writeString, StreamOutput::writeString); } else { @@ -824,19 +824,19 @@ static void writeEval(PlanStreamOutput out, Eval eval) throws IOException { static Enrich readEnrich(PlanStreamInput in) throws IOException { Enrich.Mode mode = Enrich.Mode.ANY; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_POLICY_CCQ_MODE)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { mode = in.readEnum(Enrich.Mode.class); } final Source source = in.readSource(); final LogicalPlan child = in.readLogicalPlanNode(); final Expression policyName = in.readExpression(); final NamedExpression matchField = in.readNamedExpression(); - if (in.getTransportVersion().before(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (in.getTransportVersion().before(TransportVersions.V_8_13_0)) { in.readString(); // discard the old policy name } final EnrichPolicy policy = new EnrichPolicy(in); final Map concreteIndices; - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { concreteIndices = in.readMap(StreamInput::readString, StreamInput::readString); } else { EsIndex esIndex = readEsIndex(in); @@ -849,7 +849,7 @@ static Enrich 
readEnrich(PlanStreamInput in) throws IOException { } static void writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ENRICH_POLICY_CCQ_MODE)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeEnum(enrich.mode()); } @@ -857,11 +857,11 @@ static void writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException out.writeLogicalPlanNode(enrich.child()); out.writeExpression(enrich.policyName()); out.writeNamedExpression(enrich.matchField()); - if (out.getTransportVersion().before(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { out.writeString(BytesRefs.toString(enrich.policyName().fold())); // old policy name } enrich.policy().writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_MULTI_CLUSTERS_ENRICH)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeMap(enrich.concreteIndices(), StreamOutput::writeString, StreamOutput::writeString); } else { Map concreteIndices = enrich.concreteIndices(); @@ -1819,8 +1819,8 @@ static void writeLiteral(PlanStreamOutput out, Literal literal) throws IOExcepti */ private static Object mapFromLiteralValue(PlanStreamOutput out, DataType dataType, Object value) { if (dataType == GEO_POINT || dataType == CARTESIAN_POINT) { - // In 8.12.0 and earlier builds of 8.13 (pre-release) we serialized point literals as encoded longs, but now use WKB - if (out.getTransportVersion().before(TransportVersions.ESQL_PLAN_POINT_LITERAL_WKB)) { + // In 8.12.0 we serialized point literals as encoded longs, but now use WKB + if (out.getTransportVersion().before(TransportVersions.V_8_13_0)) { if (value instanceof List list) { return list.stream().map(v -> mapFromLiteralValue(out, dataType, v)).toList(); } @@ -1836,8 +1836,8 @@ private static Object mapFromLiteralValue(PlanStreamOutput out, DataType dataTyp */ 
private static Object mapToLiteralValue(PlanStreamInput in, DataType dataType, Object value) { if (dataType == GEO_POINT || dataType == CARTESIAN_POINT) { - // In 8.12.0 and earlier builds of 8.13 (pre-release) we serialized point literals as encoded longs, but now use WKB - if (in.getTransportVersion().before(TransportVersions.ESQL_PLAN_POINT_LITERAL_WKB)) { + // In 8.12.0 we serialized point literals as encoded longs, but now use WKB + if (in.getTransportVersion().before(TransportVersions.V_8_13_0)) { if (value instanceof List list) { return list.stream().map(v -> mapToLiteralValue(in, dataType, v)).toList(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java index b72feadd20c6..ab2df4a2ba6a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequest.java @@ -70,7 +70,7 @@ final class DataNodeRequest extends TransportRequest implements IndicesRequest { // TODO make EsqlConfiguration Releasable new BlockStreamInput(in, new BlockFactory(new NoopCircuitBreaker(CircuitBreaker.REQUEST), BigArrays.NON_RECYCLING_INSTANCE)) ); - if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_CLUSTER_ALIAS)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.clusterAlias = in.readString(); } else { this.clusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY; @@ -85,7 +85,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(sessionId); configuration.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_CLUSTER_ALIAS)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeString(clusterAlias); } out.writeCollection(shardIds); diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java index 4c39d35e2ff0..d55615e9df48 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java @@ -202,7 +202,7 @@ public XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder builder @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; + return TransportVersions.V_8_13_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java index 00a406a7a3ef..685dac0f3877 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java @@ -169,7 +169,7 @@ protected XContentBuilder toXContentFragmentOfExposedFields(XContentBuilder buil @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; + return TransportVersions.V_8_13_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java index 
134cb29862e6..0a42df8c0bb4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsTaskSettings.java @@ -174,7 +174,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.ML_INFERENCE_COHERE_EMBEDDINGS_ADDED; + return TransportVersions.V_8_13_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java index 86ac5bbaaa27..ba98090c9252 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandInternalServiceSettings.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -17,8 +18,6 @@ import java.io.IOException; import java.util.Map; -import static org.elasticsearch.TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; - public class CustomElandInternalServiceSettings extends ElasticsearchInternalServiceSettings { public static final String NAME = "custom_eland_model_internal_service_settings"; @@ -86,7 +85,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; + 
return TransportVersions.V_8_13_0; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java index f6458b48f99f..a384dfe9a2c9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceSettings.java @@ -8,13 +8,12 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; import java.io.IOException; -import static org.elasticsearch.TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; - public class ElasticsearchInternalServiceSettings extends InternalServiceSettings { public static final String NAME = "text_embedding_internal_service_settings"; @@ -34,7 +33,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; + return TransportVersions.V_8_13_0; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java index 3347917bab2b..d514ca6a917d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/MultilingualE5SmallInternalServiceSettings.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.services.elasticsearch; -import org.elasticsearch.TransportVersion; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -21,8 +20,6 @@ import java.util.Arrays; import java.util.Map; -import static org.elasticsearch.TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; - public class MultilingualE5SmallInternalServiceSettings extends ElasticsearchInternalServiceSettings { public static final String NAME = "multilingual_e5_small_service_settings"; @@ -104,11 +101,6 @@ public String getWriteableName() { return MultilingualE5SmallInternalServiceSettings.NAME; } - @Override - public TransportVersion getMinimalSupportedVersion() { - return ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java index 690e8f0ddd94..fc479009d333 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsServiceSettings.java @@ -177,14 +177,11 @@ public OpenAiEmbeddingsServiceSettings(StreamInput in) throws IOException { maxInputTokens = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.ML_DIMENSIONS_SET_BY_USER_ADDED)) { - dimensionsSetByUser = in.readBoolean(); - } else { - 
dimensionsSetByUser = false; - } if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { + dimensionsSetByUser = in.readBoolean(); modelId = in.readString(); } else { + dimensionsSetByUser = false; modelId = "unset"; } @@ -310,10 +307,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalVInt(maxInputTokens); } - if (out.getTransportVersion().onOrAfter(TransportVersions.ML_DIMENSIONS_SET_BY_USER_ADDED)) { - out.writeBoolean(dimensionsSetByUser); - } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { + out.writeBoolean(dimensionsSetByUser); out.writeString(modelId); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java index f6fa7ca9005c..571d9b89a32d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java @@ -101,7 +101,7 @@ public TextExpansionQueryBuilder(StreamInput in) throws IOException { this.fieldName = in.readString(); this.modelText = in.readString(); this.modelId = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { this.tokenPruningConfig = in.readOptionalWriteable(TokenPruningConfig::new); } else { this.tokenPruningConfig = null; @@ -144,7 +144,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(fieldName); out.writeString(modelText); out.writeString(modelId); - if (out.getTransportVersion().onOrAfter(TransportVersions.TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { out.writeOptionalWriteable(tokenPruningConfig); } } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java index 51139881fc2e..1e96cb293be3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java @@ -221,7 +221,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED; + return TransportVersions.V_8_13_0; } private static float parseWeight(String token, Object weight) throws IOException { diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java index 4f7b16380d0f..4446e0aeae4d 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java @@ -37,7 +37,7 @@ public Request(String nodeId) { } public Request(StreamInput in) throws IOException { - if (in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13) + if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || in.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { // effectively super(in): @@ -50,7 +50,7 @@ public Request(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13) + if 
(out.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || out.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || out.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { super.writeTo(out); diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java index bff2b0b1793b..8356285c10d0 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java @@ -100,7 +100,7 @@ public Request( @UpdateForV9 // TODO call super(in) instead of explicitly reading superclass contents once bwc no longer needed public Request(StreamInput in) throws IOException { - if (in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13) + if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || in.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { // effectively super(in): @@ -126,7 +126,7 @@ public Request(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_13) + if (out.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || out.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || out.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { super.writeTo(out); From 5fbeff162c6e1ef6b26f3801bd84427229e79687 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 13 May 2024 10:45:39 +0100 Subject: [PATCH 078/119] Implement `toString()` 
for TMNA inner classes (#108544) These things appear in logs sometimes, but the default `toString()` is not very useful because it doesn't identify the action. This commit implements a more useful `toString()`. --- .../master/TransportMasterNodeAction.java | 17 +++++++++++++- .../TransportMasterNodeActionTests.java | 22 +++++++++++++++++-- 2 files changed, 36 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index 0dbe66822d31..e88ebbdc0768 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterService; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; @@ -322,11 +323,25 @@ public void onTimeout(TimeValue timeout) { logger.debug(() -> format("timed out while retrying [%s] after failure (timeout [%s])", actionName, timeout), failure); listener.onFailure(new MasterNotDiscoveredException(failure)); } + + @Override + public String toString() { + return Strings.format( + "listener for [%s] retrying after cluster state version [%d]", + AsyncSingleAction.this, + currentStateVersion + ); + } }, clusterState -> isTaskCancelled() || statePredicate.test(clusterState)); } private boolean isTaskCancelled() { - return task instanceof CancellableTask && ((CancellableTask) task).isCancelled(); + return task instanceof CancellableTask cancellableTask && cancellableTask.isCancelled(); + } + + @Override + public String toString() { + return 
Strings.format("execution of [%s]", task); } } } diff --git a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index b873bec2bd42..94e0ce1ccaf1 100644 --- a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.action.support.master; +import org.apache.logging.log4j.Level; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; @@ -36,6 +37,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterApplierService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -58,6 +60,8 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -475,6 +479,7 @@ public void testMasterNotAvailable() throws ExecutionException, InterruptedExcep assertFalse(request.hasReferences()); } + @TestLogging(reason = "testing TRACE logging", value = "org.elasticsearch.cluster.service:TRACE") public void testMasterBecomesAvailable() throws ExecutionException, InterruptedException { Request request = new Request(); if (randomBoolean()) { @@ -482,11 +487,24 @@ public void 
testMasterBecomesAvailable() throws ExecutionException, InterruptedE } setState(clusterService, ClusterStateCreationUtils.state(localNode, null, allNodes)); PlainActionFuture listener = new PlainActionFuture<>(); - ActionTestUtils.execute(new Action("internal:testAction", transportService, clusterService, threadPool), null, request, listener); + final var task = new Task(randomNonNegativeLong(), "test", "internal:testAction", "", TaskId.EMPTY_TASK_ID, Map.of()); + ActionTestUtils.execute(new Action("internal:testAction", transportService, clusterService, threadPool), task, request, listener); assertFalse(listener.isDone()); request.decRef(); assertTrue(request.hasReferences()); - setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)); + + MockLogAppender.assertThatLogger( + () -> setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)), + ClusterApplierService.class, + new MockLogAppender.SeenEventExpectation( + "listener log", + ClusterApplierService.class.getCanonicalName(), + Level.TRACE, + "calling [ClusterStateObserver[ObservingContext[ContextPreservingListener[listener for [execution of [" + + task + + "]] retrying after cluster state version [*]]]]] with change to version [*]" + ) + ); assertTrue(listener.isDone()); assertFalse(request.hasReferences()); listener.get(); From a46cef4c9e1afcfca65679d1817e31aa57273c4d Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 13 May 2024 11:02:51 +0100 Subject: [PATCH 079/119] Fix leak in TransformIndexerFailureHandlingTests (#108541) Uses `ActionListener#respondAndRelease` to release the search response properly. Also cleans up a few other warnings. 
Closes #108530 --- .../TransformIndexerFailureHandlingTests.java | 92 +++---------------- .../pivot/GroupByOptimizerTests.java | 2 - 2 files changed, 15 insertions(+), 79 deletions(-) diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java index ceb2507ad33a..eeef51bcbcb0 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; @@ -39,7 +38,6 @@ import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.common.notifications.Level; import org.elasticsearch.xpack.core.indexing.IndexerState; @@ -105,7 +103,6 @@ */ public class TransformIndexerFailureHandlingTests extends ESTestCase { - private Client client; private ThreadPool threadPool; private static final Function EMPTY_BULK_RESPONSE = bulkRequest -> new BulkResponse( new BulkItemResponse[0], @@ -127,7 +124,6 @@ static class MockedTransformIndexer extends ClientTransformIndexer { ClusterService clusterService, IndexNameExpressionResolver 
indexNameExpressionResolver, TransformExtension transformExtension, - String executorName, IndexBasedTransformConfigManager transformsConfigManager, CheckpointProvider checkpointProvider, TransformConfig transformConfig, @@ -195,12 +191,7 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener throw new IllegalStateException(e); } - try { - SearchResponse response = searchFunction.apply(buildSearchRequest().v2()); - nextPhase.onResponse(response); - } catch (Exception e) { - nextPhase.onFailure(e); - } + ActionListener.run(nextPhase, l -> ActionListener.respondAndRelease(l, searchFunction.apply(buildSearchRequest().v2()))); } @Override @@ -307,7 +298,6 @@ protected IterationResult doProcess(SearchResponse sea @Before public void setUpMocks() { threadPool = createThreadPool(); - client = new NoOpClient(threadPool); } @After @@ -349,17 +339,7 @@ public void testPageSizeAdapt() throws Exception { TransformAuditor auditor = MockTransformAuditor.createMockAuditor(); TransformContext context = new TransformContext(TransformTaskState.STARTED, "", 0, mock(TransformContext.Listener.class)); - MockedTransformIndexer indexer = createMockIndexer( - config, - state, - searchFunction, - bulkFunction, - null, - threadPool, - ThreadPool.Names.GENERIC, - auditor, - context - ); + MockedTransformIndexer indexer = createMockIndexer(config, state, searchFunction, bulkFunction, null, threadPool, auditor, context); final CountDownLatch latch = indexer.newLatch(1); indexer.start(); assertThat(indexer.getState(), equalTo(IndexerState.STARTED)); @@ -439,7 +419,6 @@ public void testDoProcessAggNullCheck() { bulkFunction, null, threadPool, - ThreadPool.Names.GENERIC, auditor, context ); @@ -500,17 +479,7 @@ public void testScriptError() throws Exception { TransformContext.Listener contextListener = createContextListener(failIndexerCalled, failureMessage); TransformContext context = new TransformContext(TransformTaskState.STARTED, "", 0, contextListener); - MockedTransformIndexer 
indexer = createMockIndexer( - config, - state, - searchFunction, - bulkFunction, - null, - threadPool, - ThreadPool.Names.GENERIC, - auditor, - context - ); + MockedTransformIndexer indexer = createMockIndexer(config, state, searchFunction, bulkFunction, null, threadPool, auditor, context); final CountDownLatch latch = indexer.newLatch(1); @@ -566,7 +535,10 @@ public void testRetentionPolicyDeleteByQueryThrowsIrrecoverable() throws Excepti ); try { AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); - Function searchFunction = searchRequest -> searchResponse; + Function searchFunction = searchRequest -> { + searchResponse.mustIncRef(); + return searchResponse; + }; Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); @@ -595,7 +567,6 @@ public void testRetentionPolicyDeleteByQueryThrowsIrrecoverable() throws Excepti bulkFunction, deleteByQueryFunction, threadPool, - ThreadPool.Names.GENERIC, auditor, context ); @@ -659,7 +630,10 @@ public void testRetentionPolicyDeleteByQueryThrowsTemporaryProblem() throws Exce ); try { AtomicReference state = new AtomicReference<>(IndexerState.STOPPED); - Function searchFunction = searchRequest -> searchResponse; + Function searchFunction = searchRequest -> { + searchResponse.mustIncRef(); + return searchResponse; + }; Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); @@ -694,7 +668,6 @@ public void testRetentionPolicyDeleteByQueryThrowsTemporaryProblem() throws Exce bulkFunction, deleteByQueryFunction, threadPool, - ThreadPool.Names.GENERIC, auditor, context ); @@ -768,6 +741,7 @@ public SearchResponse apply(SearchRequest searchRequest) { new ShardSearchFailure[] { new ShardSearchFailure(new Exception()) } ); } + searchResponse.mustIncRef(); return searchResponse; } }; @@ -788,7 +762,6 @@ public SearchResponse apply(SearchRequest searchRequest) { bulkFunction, null, threadPool, - ThreadPool.Names.GENERIC, auditor, context ); @@ -889,17 +862,7 
@@ public void testHandleFailureAuditing() { ) ); - MockedTransformIndexer indexer = createMockIndexer( - config, - state, - searchFunction, - bulkFunction, - null, - threadPool, - ThreadPool.Names.GENERIC, - auditor, - context - ); + MockedTransformIndexer indexer = createMockIndexer(config, state, searchFunction, bulkFunction, null, threadPool, auditor, context); indexer.handleFailure( new SearchPhaseExecutionException( @@ -1056,7 +1019,6 @@ private MockedTransformIndexer createMockIndexer( bulkFunction, null, threadPool, - ThreadPool.Names.GENERIC, mock(TransformAuditor.class), new TransformContext(TransformTaskState.STARTED, "", 0, listener), 1 @@ -1166,17 +1128,7 @@ private void testHandleFailure( ) ); - MockedTransformIndexer indexer = createMockIndexer( - config, - state, - searchFunction, - bulkFunction, - null, - threadPool, - ThreadPool.Names.GENERIC, - auditor, - context - ); + MockedTransformIndexer indexer = createMockIndexer(config, state, searchFunction, bulkFunction, null, threadPool, auditor, context); for (int i = 0; i < expectedEffectiveNumFailureRetries; ++i) { indexer.handleFailure(new Exception("exception no. 
" + (i + 1))); @@ -1209,22 +1161,10 @@ private MockedTransformIndexer createMockIndexer( Function bulkFunction, Function deleteByQueryFunction, ThreadPool threadPool, - String executorName, TransformAuditor auditor, TransformContext context ) { - return createMockIndexer( - config, - state, - searchFunction, - bulkFunction, - deleteByQueryFunction, - threadPool, - executorName, - auditor, - context, - 0 - ); + return createMockIndexer(config, state, searchFunction, bulkFunction, deleteByQueryFunction, threadPool, auditor, context, 0); } private MockedTransformIndexer createMockIndexer( @@ -1234,7 +1174,6 @@ private MockedTransformIndexer createMockIndexer( Function bulkFunction, Function deleteByQueryFunction, ThreadPool threadPool, - String executorName, TransformAuditor auditor, TransformContext context, int doProcessCount @@ -1250,7 +1189,6 @@ private MockedTransformIndexer createMockIndexer( mock(ClusterService.class), mock(IndexNameExpressionResolver.class), mock(TransformExtension.class), - executorName, transformConfigManager, mock(CheckpointProvider.class), config, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/GroupByOptimizerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/GroupByOptimizerTests.java index a1ea87a97719..039781aedb4b 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/GroupByOptimizerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/GroupByOptimizerTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.transform.transforms.pivot; -import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.transform.TransformConfigVersion; import org.elasticsearch.xpack.core.transform.transforms.pivot.SingleGroupSource; @@ -28,7 +27,6 @@ import static 
org.elasticsearch.xpack.core.transform.transforms.pivot.TermsGroupSourceTests.randomTermsGroupSource; import static org.elasticsearch.xpack.core.transform.transforms.pivot.TermsGroupSourceTests.randomTermsGroupSourceNoScript; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108530") public class GroupByOptimizerTests extends ESTestCase { public void testOneGroupBy() { From 56c283599d5ed73164d636592c92f6be77f65013 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 13 May 2024 11:14:24 +0100 Subject: [PATCH 080/119] GeoIP tasks should wait longer for master (#108410) Today when creating or removing the GeoIP downloader tasks we use the default 30s master-node timeout. Yet, for these internal tasks we should wait forever. This commit extends the timeout. --- docs/changelog/108410.yaml | 5 +++++ .../ingest/geoip/GeoIpDownloaderTaskExecutor.java | 8 ++++++-- 2 files changed, 11 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/108410.yaml diff --git a/docs/changelog/108410.yaml b/docs/changelog/108410.yaml new file mode 100644 index 000000000000..5fd831231a3b --- /dev/null +++ b/docs/changelog/108410.yaml @@ -0,0 +1,5 @@ +pr: 108410 +summary: GeoIP tasks should wait longer for master +area: Ingest Node +type: bug +issues: [] diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index c04dffe82b3c..0a423cb375e8 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.gateway.GatewayService; 
import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -341,12 +342,15 @@ && hasAtLeastOneGeoipProcessor( ); } + @UpdateForV9 // use MINUS_ONE once that means no timeout + private static final TimeValue MASTER_TIMEOUT = TimeValue.MAX_VALUE; + private void startTask(Runnable onFailure) { persistentTasksService.sendStartRequest( GEOIP_DOWNLOADER, GEOIP_DOWNLOADER, new GeoIpTaskParams(), - null, + MASTER_TIMEOUT, ActionListener.wrap(r -> logger.debug("Started geoip downloader task"), e -> { Throwable t = e instanceof RemoteTransportException ? ExceptionsHelper.unwrapCause(e) : e; if (t instanceof ResourceAlreadyExistsException == false) { @@ -368,7 +372,7 @@ private void stopTask(Runnable onFailure) { } } ); - persistentTasksService.sendRemoveRequest(GEOIP_DOWNLOADER, null, ActionListener.runAfter(listener, () -> { + persistentTasksService.sendRemoveRequest(GEOIP_DOWNLOADER, MASTER_TIMEOUT, ActionListener.runAfter(listener, () -> { IndexAbstraction databasesAbstraction = clusterService.state().metadata().getIndicesLookup().get(DATABASES_INDEX); if (databasesAbstraction != null) { // regardless of whether DATABASES_INDEX is an alias, resolve it to a concrete index From 8831aaf89a3721ae6bf17662d6e0d916ca3fe05b Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 13 May 2024 12:15:36 +0100 Subject: [PATCH 081/119] [ML] Add rerank task type to the internal inference service (#108452) Implements the rerank task using cross encoder models uploaded to Elasticsearch with Eland and the _ml/trained_models APIs. The cross encoder model should be configured for text_similarity. 
--- docs/changelog/108452.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 2 +- .../inference/results/RankedDocsResults.java | 33 +++- .../TextSimilarityInferenceResults.java | 4 + .../TextSimilarityConfigUpdate.java | 7 + .../results/RankedDocsResultsTests.java | 23 ++- .../cohere/CohereRankedResponseEntity.java | 8 +- .../inference/services/ServiceUtils.java | 93 +++++++--- .../AzureOpenAiEmbeddingsServiceSettings.java | 7 +- .../rerank/CohereRerankTaskSettings.java | 2 +- .../elasticsearch/CustomElandModel.java | 33 ++++ .../CustomElandRerankTaskSettings.java | 134 ++++++++++++++ .../ElasticsearchInternalService.java | 136 ++++++++++++--- .../elasticsearch/ElasticsearchModel.java | 11 ++ .../inference/services/ServiceUtilsTests.java | 95 +++++++++- .../CohereEmbeddingsServiceSettingsTests.java | 5 +- .../ElasticsearchInternalServiceTests.java | 164 ++++++++++++++++++ .../nlp/TextSimilarityProcessor.java | 2 +- 18 files changed, 681 insertions(+), 83 deletions(-) create mode 100644 docs/changelog/108452.yaml create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettings.java diff --git a/docs/changelog/108452.yaml b/docs/changelog/108452.yaml new file mode 100644 index 000000000000..fdf531602c80 --- /dev/null +++ b/docs/changelog/108452.yaml @@ -0,0 +1,5 @@ +pr: 108452 +summary: Add the rerank task to the Elasticsearch internal inference service +area: Machine Learning +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 95961546f1e1..1a6b7bb23804 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -163,7 +163,7 @@ static TransportVersion def(int id) { public static final TransportVersion SECURITY_ROLE_DESCRIPTION = def(8_654_00_0); public static final 
TransportVersion ML_INFERENCE_AZURE_OPENAI_COMPLETIONS = def(8_655_00_0); public static final TransportVersion JOIN_STATUS_AGE_SERIALIZATION = def(8_656_00_0); - + public static final TransportVersion ML_RERANK_DOC_OPTIONAL = def(8_657_00_0); /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java index 631aa77a282e..f82ee8b73c7a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResults.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; @@ -27,6 +28,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.TransportVersions.ML_INFERENCE_RERANK_NEW_RESPONSE_FORMAT; +import static org.elasticsearch.TransportVersions.ML_RERANK_DOC_OPTIONAL; public class RankedDocsResults implements InferenceServiceResults { public static final String NAME = "rerank_service_results"; @@ -66,7 +68,11 @@ public static ConstructingObjectParser createParser(boo * @param relevanceScore * @param text */ - public record RankedDoc(int index, float relevanceScore, String text) implements Writeable, ToXContentObject { + public record RankedDoc(int index, float relevanceScore, @Nullable String text) + implements + Comparable, + Writeable, + ToXContentObject { public static ConstructingObjectParser 
createParser(boolean ignoreUnknownFields) { ConstructingObjectParser parser = new ConstructingObjectParser<>( @@ -77,7 +83,7 @@ public static ConstructingObjectParser createParser(boolean ign ); parser.declareInt(ConstructingObjectParser.constructorArg(), INDEX_FIELD); parser.declareFloat(ConstructingObjectParser.constructorArg(), RELEVANCE_SCORE_FIELD); - parser.declareString(ConstructingObjectParser.constructorArg(), TEXT_FIELD); + parser.declareString(ConstructingObjectParser.optionalConstructorArg(), TEXT_FIELD); return parser; } @@ -95,7 +101,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(INDEX, index); builder.field(RELEVANCE_SCORE, relevanceScore); - builder.field(TEXT, text); + if (text != null) { + builder.field(TEXT, text); + } builder.endObject(); @@ -103,7 +111,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } public static RankedDoc of(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(ML_INFERENCE_RERANK_NEW_RESPONSE_FORMAT)) { + if (in.getTransportVersion().onOrAfter(ML_RERANK_DOC_OPTIONAL)) { + return new RankedDoc(in.readInt(), in.readFloat(), in.readOptionalString()); + } else if (in.getTransportVersion().onOrAfter(ML_INFERENCE_RERANK_NEW_RESPONSE_FORMAT)) { return new RankedDoc(in.readInt(), in.readFloat(), in.readString()); } else { return new RankedDoc(Integer.parseInt(in.readString()), Float.parseFloat(in.readString()), in.readString()); @@ -112,14 +122,18 @@ public static RankedDoc of(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(ML_INFERENCE_RERANK_NEW_RESPONSE_FORMAT)) { + if (out.getTransportVersion().onOrAfter(ML_RERANK_DOC_OPTIONAL)) { + out.writeInt(index); + out.writeFloat(relevanceScore); + out.writeOptionalString(text); + } else if (out.getTransportVersion().onOrAfter(ML_INFERENCE_RERANK_NEW_RESPONSE_FORMAT)) { 
out.writeInt(index); out.writeFloat(relevanceScore); - out.writeString(text); + out.writeString(text == null ? "" : text); } else { out.writeString(Integer.toString(index)); out.writeString(Float.toString(relevanceScore)); - out.writeString(text); + out.writeString(text == null ? "" : text); } } @@ -127,6 +141,11 @@ public Map asMap() { return Map.of(NAME, Map.of(INDEX, index, RELEVANCE_SCORE, relevanceScore, TEXT, text)); } + @Override + public int compareTo(RankedDoc other) { + return Float.compare(other.relevanceScore, this.relevanceScore); + } + public String toString() { return "RankedDoc{" + "index='" diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextSimilarityInferenceResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextSimilarityInferenceResults.java index b8b75e2bf7eb..412ccfa7b24a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextSimilarityInferenceResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/results/TextSimilarityInferenceResults.java @@ -58,6 +58,10 @@ public String getResultsField() { return resultsField; } + public double score() { + return score; + } + @Override public Double predictedValue() { return score; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java index 2ddbf8bd63f4..4e914cba1ff0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextSimilarityConfigUpdate.java @@ -69,6 +69,13 @@ public static TextSimilarityConfigUpdate fromMap(Map map) { private final String resultsField; 
private final TextSimilarityConfig.SpanScoreFunction spanScoreFunction; + public TextSimilarityConfigUpdate(String text) { + super((TokenizationUpdate) null); + this.text = ExceptionsHelper.requireNonNull(text, TEXT); + this.resultsField = null; + this.spanScoreFunction = null; + } + public TextSimilarityConfigUpdate( String text, @Nullable String resultsField, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResultsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResultsTests.java index 3be073b43982..603531f0aedf 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResultsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/results/RankedDocsResultsTests.java @@ -16,6 +16,8 @@ import java.util.ArrayList; import java.util.List; +import static org.elasticsearch.TransportVersions.ML_RERANK_DOC_OPTIONAL; + public class RankedDocsResultsTests extends AbstractBWCSerializationTestCase { @Override @@ -33,7 +35,7 @@ public static RankedDocsResults createRandom() { } public static RankedDocsResults.RankedDoc createRandomDoc() { - return new RankedDocsResults.RankedDoc(randomIntBetween(0, 100), randomFloat(), randomAlphaOfLength(10)); + return new RankedDocsResults.RankedDoc(randomIntBetween(0, 100), randomFloat(), randomBoolean() ? 
null : randomAlphaOfLength(10)); } @Override @@ -45,7 +47,24 @@ protected RankedDocsResults mutateInstance(RankedDocsResults instance) throws IO @Override protected RankedDocsResults mutateInstanceForVersion(RankedDocsResults instance, TransportVersion fromVersion) { - return instance; + if (fromVersion.onOrAfter(ML_RERANK_DOC_OPTIONAL)) { + return instance; + } else { + var compatibleDocs = rankedDocsNullStringToEmpty(instance.getRankedDocs()); + return new RankedDocsResults(compatibleDocs); + } + } + + private List rankedDocsNullStringToEmpty(List rankedDocs) { + var result = new ArrayList(rankedDocs.size()); + for (var doc : rankedDocs) { + if (doc.text() == null) { + result.add(new RankedDocsResults.RankedDoc(doc.index(), doc.relevanceScore(), "")); + } else { + result.add(doc); + } + } + return result; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java index 93141727f705..c9cc71b7fdcd 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/cohere/CohereRankedResponseEntity.java @@ -148,14 +148,12 @@ private static RankedDocsResults.RankedDoc parseRankedDocObject(XContentParser p } if (index == -1) { - logger.error("Failed to find required field [index] in Cohere embeddings response"); + logger.warn("Failed to find required field [index] in Cohere rerank response"); } if (relevanceScore == -1) { - logger.error("Failed to find required field [relevance_score] in Cohere embeddings response"); - } - if (documentText == null) { - logger.error("Failed to find required field [document] in Cohere embeddings response"); + logger.warn("Failed to find required 
field [relevance_score] in Cohere rerank response"); } + // documentText may or may not be present depending on the request parameter return new RankedDocsResults.RankedDoc(index, relevanceScore, documentText); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 6f9e32e32f66..47c7cc0fce01 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -60,16 +60,42 @@ public static T removeAsType(Map sourceMap, String key, Clas if (type.isAssignableFrom(o.getClass())) { return (T) o; } else { - throw new ElasticsearchStatusException( - "field [{}] is not of the expected type." + " The value [{}] cannot be converted to a [{}]", - RestStatus.BAD_REQUEST, - key, - o, - type.getSimpleName() - ); + throw new ElasticsearchStatusException(invalidTypeErrorMsg(key, o, type.getSimpleName()), RestStatus.BAD_REQUEST); } } + /** + * Remove the object from the map and cast to the expected type. 
+ * If the object cannot be cast to type and error is added to the + * {@code validationException} parameter + * + * @param sourceMap Map containing fields + * @param key The key of the object to remove + * @param type The expected type of the removed object + * @param validationException If the value is not of type {@code type} + * @return {@code null} if not present else the object cast to type T + * @param The expected type + */ + @SuppressWarnings("unchecked") + public static T removeAsType(Map sourceMap, String key, Class type, ValidationException validationException) { + Object o = sourceMap.remove(key); + if (o == null) { + return null; + } + + if (type.isAssignableFrom(o.getClass())) { + return (T) o; + } else { + validationException.addValidationError(invalidTypeErrorMsg(key, o, type.getSimpleName())); + return null; + } + } + + @SuppressWarnings("unchecked") + public static Map removeFromMap(Map sourceMap, String fieldName) { + return (Map) sourceMap.remove(fieldName); + } + @SuppressWarnings("unchecked") public static Map removeFromMapOrThrowIfNull(Map sourceMap, String fieldName) { Map value = (Map) sourceMap.remove(fieldName); @@ -116,6 +142,15 @@ public static String missingSettingErrorMsg(String settingName, String scope) { return Strings.format("[%s] does not contain the required setting [%s]", scope, settingName); } + public static String invalidTypeErrorMsg(String settingName, Object foundObject, String expectedType) { + return Strings.format( + "field [%s] is not of the expected type. 
The value [%s] cannot be converted to a [%s]", + settingName, + foundObject, + expectedType + ); + } + public static String invalidUrlErrorMsg(String url, String settingName, String settingScope) { return Strings.format("[%s] Invalid url [%s] received for field [%s]", settingScope, url, settingName); } @@ -230,7 +265,13 @@ public static String extractRequiredString( String scope, ValidationException validationException ) { - String requiredField = ServiceUtils.removeAsType(map, settingName, String.class); + int initialValidationErrorCount = validationException.validationErrors().size(); + String requiredField = ServiceUtils.removeAsType(map, settingName, String.class, validationException); + + if (validationException.validationErrors().size() > initialValidationErrorCount) { + // new validation error occurred + return null; + } if (requiredField == null) { validationException.addValidationError(ServiceUtils.missingSettingErrorMsg(settingName, scope)); @@ -238,7 +279,7 @@ public static String extractRequiredString( validationException.addValidationError(ServiceUtils.mustBeNonEmptyString(settingName, scope)); } - if (validationException.validationErrors().isEmpty() == false) { + if (validationException.validationErrors().size() > initialValidationErrorCount) { return null; } @@ -251,13 +292,19 @@ public static String extractOptionalString( String scope, ValidationException validationException ) { - String optionalField = ServiceUtils.removeAsType(map, settingName, String.class); + int initialValidationErrorCount = validationException.validationErrors().size(); + String optionalField = ServiceUtils.removeAsType(map, settingName, String.class, validationException); + + if (validationException.validationErrors().size() > initialValidationErrorCount) { + // new validation error occurred + return null; + } if (optionalField != null && optionalField.isEmpty()) { validationException.addValidationError(ServiceUtils.mustBeNonEmptyString(settingName, scope)); } - if 
(validationException.validationErrors().isEmpty() == false) { + if (validationException.validationErrors().size() > initialValidationErrorCount) { return null; } @@ -270,13 +317,18 @@ public static Integer extractOptionalPositiveInteger( String scope, ValidationException validationException ) { - Integer optionalField = ServiceUtils.removeAsType(map, settingName, Integer.class); + int initialValidationErrorCount = validationException.validationErrors().size(); + Integer optionalField = ServiceUtils.removeAsType(map, settingName, Integer.class, validationException); + + if (validationException.validationErrors().size() > initialValidationErrorCount) { + return null; + } if (optionalField != null && optionalField <= 0) { validationException.addValidationError(ServiceUtils.mustBeAPositiveNumberErrorMessage(settingName, scope, optionalField)); } - if (validationException.validationErrors().isEmpty() == false) { + if (validationException.validationErrors().size() > initialValidationErrorCount) { return null; } @@ -309,19 +361,8 @@ public static > E extractOptionalEnum( return null; } - public static Boolean extractOptionalBoolean( - Map map, - String settingName, - String scope, - ValidationException validationException - ) { - Boolean optionalField = ServiceUtils.removeAsType(map, settingName, Boolean.class); - - if (validationException.validationErrors().isEmpty() == false) { - return null; - } - - return optionalField; + public static Boolean extractOptionalBoolean(Map map, String settingName, ValidationException validationException) { + return ServiceUtils.removeAsType(map, settingName, Boolean.class, validationException); } public static TimeValue extractOptionalTimeValue( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java index 514d5684fc7c..33bb0fdb07c5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/embeddings/AzureOpenAiEmbeddingsServiceSettings.java @@ -92,12 +92,7 @@ private static CommonFields fromMap( SimilarityMeasure similarity = extractSimilarity(map, ModelConfigurations.SERVICE_SETTINGS, validationException); RateLimitSettings rateLimitSettings = RateLimitSettings.of(map, DEFAULT_RATE_LIMIT_SETTINGS, validationException); - Boolean dimensionsSetByUser = extractOptionalBoolean( - map, - DIMENSIONS_SET_BY_USER, - ModelConfigurations.SERVICE_SETTINGS, - validationException - ); + Boolean dimensionsSetByUser = extractOptionalBoolean(map, DIMENSIONS_SET_BY_USER, validationException); switch (context) { case REQUEST -> { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java index 75588aa2b503..82f2d0e6f7ad 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/rerank/CohereRerankTaskSettings.java @@ -49,7 +49,7 @@ public static CohereRerankTaskSettings fromMap(Map map) { return EMPTY_SETTINGS; } - Boolean returnDocuments = extractOptionalBoolean(map, RETURN_DOCUMENTS, ModelConfigurations.TASK_SETTINGS, validationException); + Boolean returnDocuments = extractOptionalBoolean(map, RETURN_DOCUMENTS, validationException); Integer topNDocumentsOnly = 
extractOptionalPositiveInteger( map, TOP_N_DOCS_ONLY, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java index aa05af946156..1f9ec163aa54 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandModel.java @@ -9,16 +9,32 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import java.util.Map; + import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.STARTED; public class CustomElandModel extends ElasticsearchModel { + public static CustomElandModel build( + String inferenceEntityId, + TaskType taskType, + String service, + CustomElandInternalServiceSettings serviceSettings, + @Nullable TaskSettings taskSettings + ) { + return taskSettings == null + ? 
new CustomElandModel(inferenceEntityId, taskType, service, serviceSettings) + : new CustomElandModel(inferenceEntityId, taskType, service, serviceSettings, taskSettings); + } + public CustomElandModel( String inferenceEntityId, TaskType taskType, @@ -28,6 +44,16 @@ public CustomElandModel( super(inferenceEntityId, taskType, service, serviceSettings); } + private CustomElandModel( + String inferenceEntityId, + TaskType taskType, + String service, + CustomElandInternalServiceSettings serviceSettings, + TaskSettings taskSettings + ) { + super(inferenceEntityId, taskType, service, serviceSettings, taskSettings); + } + @Override public CustomElandInternalServiceSettings getServiceSettings() { return (CustomElandInternalServiceSettings) super.getServiceSettings(); @@ -76,4 +102,11 @@ public void onFailure(Exception e) { }; } + public static TaskSettings taskSettingsFromMap(TaskType taskType, Map taskSettingsMap) { + if (TaskType.RERANK.equals(taskType)) { + return CustomElandRerankTaskSettings.defaultsFromMap(taskSettingsMap); + } + + return null; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettings.java new file mode 100644 index 000000000000..a82ffbba3d68 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/CustomElandRerankTaskSettings.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.elasticsearch; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.TaskSettings; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalBoolean; + +/** + * Defines the task settings for internal rerank service. + */ +public class CustomElandRerankTaskSettings implements TaskSettings { + + public static final String NAME = "custom_eland_rerank_task_settings"; + public static final String RETURN_DOCUMENTS = "return_documents"; + + static final CustomElandRerankTaskSettings DEFAULT_SETTINGS = new CustomElandRerankTaskSettings(Boolean.TRUE); + + public static CustomElandRerankTaskSettings defaultsFromMap(Map map) { + ValidationException validationException = new ValidationException(); + + if (map == null || map.isEmpty()) { + return DEFAULT_SETTINGS; + } + + Boolean returnDocuments = extractOptionalBoolean(map, RETURN_DOCUMENTS, validationException); + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + if (returnDocuments == null) { + returnDocuments = true; + } + + return new CustomElandRerankTaskSettings(returnDocuments); + } + + /** + * From map without any validation + * @param map source map + * @return Task settings + */ + public static CustomElandRerankTaskSettings fromMap(Map map) { + if (map == null || map.isEmpty()) { + return DEFAULT_SETTINGS; + } + + Boolean returnDocuments = extractOptionalBoolean(map, RETURN_DOCUMENTS, new ValidationException()); + return new CustomElandRerankTaskSettings(returnDocuments); 
+ } + + /** + * Return either the request or orignal settings by preferring non-null fields + * from the request settings over the original settings. + * + * @param originalSettings the settings stored as part of the inference entity configuration + * @param requestTaskSettings the settings passed in within the task_settings field of the request + * @return Either {@code originalSettings} or {@code requestTaskSettings} + */ + public static CustomElandRerankTaskSettings of( + CustomElandRerankTaskSettings originalSettings, + CustomElandRerankTaskSettings requestTaskSettings + ) { + return requestTaskSettings.returnDocuments() != null ? requestTaskSettings : originalSettings; + } + + private final Boolean returnDocuments; + + public CustomElandRerankTaskSettings(StreamInput in) throws IOException { + this(in.readOptionalBoolean()); + } + + public CustomElandRerankTaskSettings(@Nullable Boolean doReturnDocuments) { + this.returnDocuments = doReturnDocuments; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (returnDocuments != null) { + builder.field(RETURN_DOCUMENTS, returnDocuments); + } + builder.endObject(); + return builder; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_INFERENCE_COHERE_RERANK; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalBoolean(returnDocuments); + } + + public Boolean returnDocuments() { + return returnDocuments; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CustomElandRerankTaskSettings that = (CustomElandRerankTaskSettings) o; + return Objects.equals(returnDocuments, that.returnDocuments); + } + + @Override + public int hashCode() { + return Objects.hash(returnDocuments); 
+ } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index cceeb59284c1..408e3ec1ccbc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -30,6 +30,7 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.inference.results.RankedDocsResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; @@ -40,18 +41,22 @@ import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextSimilarityConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import static org.elasticsearch.xpack.core.ClientHelper.INFERENCE_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import 
static org.elasticsearch.xpack.core.inference.results.ResultUtils.createInvalidChunkedResultException; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMap; import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; import static org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings.MODEL_ID; @@ -85,6 +90,7 @@ public void parseRequestConfig( ) { try { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map taskSettingsMap = removeFromMap(config, ModelConfigurations.TASK_SETTINGS); String modelId = (String) serviceSettingsMap.get(MODEL_ID); if (modelId == null) { throw new IllegalArgumentException("Error parsing request config, model id is missing"); @@ -93,7 +99,7 @@ public void parseRequestConfig( e5Case(inferenceEntityId, taskType, config, platformArchitectures, serviceSettingsMap, modelListener); } else { throwIfNotEmptyMap(config, name()); - customElandCase(inferenceEntityId, taskType, serviceSettingsMap, modelListener); + customElandCase(inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, modelListener); } } catch (Exception e) { modelListener.onFailure(e); @@ -104,6 +110,7 @@ private void customElandCase( String inferenceEntityId, TaskType taskType, Map serviceSettingsMap, + Map taskSettingsMap, ActionListener modelListener ) { String modelId = (String) serviceSettingsMap.get(MODEL_ID); @@ -121,7 +128,18 @@ private void customElandCase( serviceSettingsMap ).build(); throwIfNotEmptyMap(serviceSettingsMap, name()); - delegate.onResponse(new CustomElandModel(inferenceEntityId, taskType, name(), customElandInternalServiceSettings)); + + var taskSettings = CustomElandModel.taskSettingsFromMap(TaskType.RERANK, taskSettingsMap); + throwIfNotEmptyMap(taskSettingsMap, name()); + + var model = CustomElandModel.build( + 
inferenceEntityId, + TaskType.RERANK, + name(), + customElandInternalServiceSettings, + taskSettings + ); + delegate.onResponse(model); } }); @@ -184,6 +202,7 @@ public ElasticsearchModel parsePersistedConfigWithSecrets( @Override public ElasticsearchModel parsePersistedConfig(String inferenceEntityId, TaskType taskType, Map config) { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map taskSettingsMap = removeFromMap(config, ModelConfigurations.TASK_SETTINGS); String modelId = (String) serviceSettingsMap.get(MODEL_ID); if (modelId == null) { @@ -198,14 +217,12 @@ public ElasticsearchModel parsePersistedConfig(String inferenceEntityId, TaskTyp (MultilingualE5SmallInternalServiceSettings) MultilingualE5SmallInternalServiceSettings.fromMap(serviceSettingsMap).build() ); } else { - return new CustomElandModel( - inferenceEntityId, - taskType, - name(), - (CustomElandInternalServiceSettings) CustomElandInternalServiceSettings.fromMap(serviceSettingsMap).build() - ); - } + var serviceSettings = (CustomElandInternalServiceSettings) CustomElandInternalServiceSettings.fromMap(serviceSettingsMap) + .build(); + var taskSettings = CustomElandModel.taskSettingsFromMap(taskType, taskSettingsMap); + return CustomElandModel.build(inferenceEntityId, taskType, name(), serviceSettings, taskSettings); + } } @Override @@ -218,13 +235,23 @@ public void infer( TimeValue timeout, ActionListener listener ) { - try { - checkCompatibleTaskType(model.getConfigurations().getTaskType()); - } catch (Exception e) { - listener.onFailure(e); - return; + var taskType = model.getConfigurations().getTaskType(); + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + inferTextEmbedding(model, input, inputType, timeout, listener); + } else if (TaskType.RERANK.equals(taskType)) { + inferRerank(model, query, input, timeout, taskSettings, listener); + } else { + throw new ElasticsearchStatusException(TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), 
RestStatus.BAD_REQUEST); } + } + public void inferTextEmbedding( + Model model, + List input, + InputType inputType, + TimeValue timeout, + ActionListener listener + ) { var request = InferTrainedModelDeploymentAction.Request.forTextInput( model.getConfigurations().getInferenceEntityId(), TextEmbeddingConfigUpdate.EMPTY_INSTANCE, @@ -239,6 +266,37 @@ public void infer( ); } + public void inferRerank( + Model model, + String query, + List inputs, + TimeValue timeout, + Map requestTaskSettings, + ActionListener listener + ) { + var config = new TextSimilarityConfigUpdate(query); + var request = InferTrainedModelDeploymentAction.Request.forTextInput( + model.getConfigurations().getInferenceEntityId(), + config, + inputs, + timeout + ); + + var modelSettings = (CustomElandRerankTaskSettings) model.getTaskSettings(); + var requestSettings = CustomElandRerankTaskSettings.fromMap(requestTaskSettings); + Boolean returnDocs = CustomElandRerankTaskSettings.of(modelSettings, requestSettings).returnDocuments(); + + Function inputSupplier = returnDocs == Boolean.TRUE ? 
inputs::get : i -> null; + + client.execute( + InferTrainedModelDeploymentAction.INSTANCE, + request, + listener.delegateFailureAndWrap( + (l, inferenceResult) -> l.onResponse(textSimilarityResultsToRankedDocs(inferenceResult.getResults(), inputSupplier)) + ) + ); + } + public void chunkedInfer( Model model, List input, @@ -262,10 +320,10 @@ public void chunkedInfer( TimeValue timeout, ActionListener> listener ) { - try { - checkCompatibleTaskType(model.getConfigurations().getTaskType()); - } catch (Exception e) { - listener.onFailure(e); + if (TaskType.TEXT_EMBEDDING.isAnyOrSame(model.getTaskType()) == false) { + listener.onFailure( + new ElasticsearchStatusException(TaskType.unsupportedTaskTypeErrorMsg(model.getTaskType(), NAME), RestStatus.BAD_REQUEST) + ); return; } @@ -315,7 +373,7 @@ public void start(Model model, ActionListener listener) { return; } - if (model.getConfigurations().getTaskType() != TaskType.TEXT_EMBEDDING) { + if (model.getTaskType() != TaskType.TEXT_EMBEDDING && model.getTaskType() != TaskType.RERANK) { listener.onFailure( new IllegalStateException(TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), NAME)) ); @@ -364,7 +422,7 @@ public void putModel(Model model, ActionListener listener) { } }) ); - } else if (model instanceof CustomElandModel elandModel) { + } else if (model instanceof CustomElandModel) { logger.info("Custom eland model detected, model must have been already loaded into the cluster with eland."); listener.onResponse(Boolean.TRUE); } else { @@ -412,12 +470,6 @@ private static IllegalStateException notTextEmbeddingModelException(Model model) ); } - private void checkCompatibleTaskType(TaskType taskType) { - if (TaskType.TEXT_EMBEDDING.isAnyOrSame(taskType) == false) { - throw new ElasticsearchStatusException(TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), RestStatus.BAD_REQUEST); - } - } - @Override public boolean isInClusterService() { return true; @@ -448,4 +500,36 @@ private static String 
selectDefaultModelVariantBasedOnClusterArchitecture(Set results, + Function inputSupplier + ) { + List rankings = new ArrayList<>(results.size()); + for (int i = 0; i < results.size(); i++) { + var result = results.get(i); + if (result instanceof org.elasticsearch.xpack.core.ml.inference.results.TextSimilarityInferenceResults similarity) { + rankings.add(new RankedDocsResults.RankedDoc(i, (float) similarity.score(), inputSupplier.apply(i))); + } else if (result instanceof org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults errorResult) { + if (errorResult.getException() instanceof ElasticsearchStatusException statusException) { + throw statusException; + } else { + throw new ElasticsearchStatusException( + "Received error inference result.", + RestStatus.INTERNAL_SERVER_ERROR, + errorResult.getException() + ); + } + } else { + throw new IllegalArgumentException( + "Received invalid inference result, of type " + + result.getClass().getName() + + " but expected TextSimilarityInferenceResults." 
+ ); + } + } + + Collections.sort(rankings); + return new RankedDocsResults(rankings); + } + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchModel.java index 954469537a4c..dc6561ba992f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchModel.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; @@ -25,6 +26,16 @@ public ElasticsearchModel( super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings)); } + public ElasticsearchModel( + String inferenceEntityId, + TaskType taskType, + String service, + ElasticsearchInternalServiceSettings serviceSettings, + TaskSettings taskSettings + ) { + super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings)); + } + @Override public ElasticsearchInternalServiceSettings getServiceSettings() { return (ElasticsearchInternalServiceSettings) super.getServiceSettings(); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java index 26f6e5b7e694..7d0d076a0a22 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceUtilsTests.java @@ -31,6 +31,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.convertToUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalEnum; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalPositiveInteger; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalTimeValue; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredSecureString; @@ -71,6 +72,21 @@ public void testRemoveAsTypeWithTheCorrectType() { assertThat(map.entrySet(), empty()); } + public void testRemoveAsType_Validation_WithTheCorrectType() { + Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE, "d", 1.0)); + + ValidationException validationException = new ValidationException(); + Integer i = ServiceUtils.removeAsType(map, "a", Integer.class, validationException); + assertEquals(Integer.valueOf(5), i); + assertNull(map.get("a")); // field has been removed + assertThat(validationException.validationErrors(), empty()); + + String str = ServiceUtils.removeAsType(map, "b", String.class, validationException); + assertEquals("a string", str); + assertNull(map.get("b")); + assertThat(validationException.validationErrors(), empty()); + } + public void testRemoveAsTypeWithInCorrectType() { Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE, "d", 5.0, "e", 5)); @@ -113,6 +129,62 @@ public void testRemoveAsTypeWithInCorrectType() { assertThat(map.entrySet(), empty()); } + public void testRemoveAsType_Validation_WithInCorrectType() { + 
Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE, "d", 5.0, "e", 5)); + + var validationException = new ValidationException(); + Object result = ServiceUtils.removeAsType(map, "a", String.class, validationException); + assertNull(result); + assertThat(validationException.validationErrors(), hasSize(1)); + assertThat( + validationException.validationErrors().get(0), + containsString("field [a] is not of the expected type. The value [5] cannot be converted to a [String]") + ); + + validationException = new ValidationException(); + ServiceUtils.removeAsType(map, "b", Boolean.class, validationException); + assertThat(validationException.validationErrors(), hasSize(1)); + assertThat( + validationException.validationErrors().get(0), + containsString("field [b] is not of the expected type. The value [a string] cannot be converted to a [Boolean]") + ); + assertNull(map.get("b")); + + validationException = new ValidationException(); + result = ServiceUtils.removeAsType(map, "c", Integer.class, validationException); + assertNull(result); + assertThat(validationException.validationErrors(), hasSize(1)); + assertThat( + validationException.validationErrors().get(0), + containsString("field [c] is not of the expected type. The value [true] cannot be converted to a [Integer]") + ); + assertNull(map.get("c")); + + // cannot convert double to integer + validationException = new ValidationException(); + result = ServiceUtils.removeAsType(map, "d", Integer.class, validationException); + assertNull(result); + assertThat(validationException.validationErrors(), hasSize(1)); + assertThat( + validationException.validationErrors().get(0), + containsString("field [d] is not of the expected type. 
The value [5.0] cannot be converted to a [Integer]") + ); + assertNull(map.get("d")); + + // cannot convert integer to double + validationException = new ValidationException(); + result = ServiceUtils.removeAsType(map, "e", Double.class, validationException); + assertNull(result); + assertThat(validationException.validationErrors(), hasSize(1)); + assertThat( + validationException.validationErrors().get(0), + containsString("field [e] is not of the expected type. The value [5] cannot be converted to a [Double]") + ); + assertNull(map.get("d")); + + assertThat(map.entrySet(), empty()); + } + public void testRemoveAsTypeMissingReturnsNull() { Map map = new HashMap<>(Map.of("a", 5, "b", "a string", "c", Boolean.TRUE)); assertNull(ServiceUtils.removeAsType(new HashMap<>(), "missing", Integer.class)); @@ -197,10 +269,11 @@ public void testExtractRequiredSecureString_AddsException_WhenFieldIsEmpty() { public void testExtractRequiredString_CreatesString() { var validation = new ValidationException(); + validation.addValidationError("previous error"); Map map = modifiableMap(Map.of("key", "value")); var createdString = extractRequiredString(map, "key", "scope", validation); - assertTrue(validation.validationErrors().isEmpty()); + assertThat(validation.validationErrors(), hasSize(1)); assertNotNull(createdString); assertThat(createdString, is("value")); assertTrue(map.isEmpty()); @@ -208,24 +281,27 @@ public void testExtractRequiredString_CreatesString() { public void testExtractRequiredString_AddsException_WhenFieldDoesNotExist() { var validation = new ValidationException(); + validation.addValidationError("previous error"); + Map map = modifiableMap(Map.of("key", "value")); var createdString = extractRequiredSecureString(map, "abc", "scope", validation); assertNull(createdString); - assertFalse(validation.validationErrors().isEmpty()); + assertThat(validation.validationErrors(), hasSize(2)); assertThat(map.size(), is(1)); - assertThat(validation.validationErrors().get(0), 
is("[scope] does not contain the required setting [abc]")); + assertThat(validation.validationErrors().get(1), is("[scope] does not contain the required setting [abc]")); } public void testExtractRequiredString_AddsException_WhenFieldIsEmpty() { var validation = new ValidationException(); + validation.addValidationError("previous error"); Map map = modifiableMap(Map.of("key", "")); var createdString = extractOptionalString(map, "key", "scope", validation); assertNull(createdString); assertFalse(validation.validationErrors().isEmpty()); assertTrue(map.isEmpty()); - assertThat(validation.validationErrors().get(0), is("[scope] Invalid value empty string. [key] must be a non-empty string")); + assertThat(validation.validationErrors().get(1), is("[scope] Invalid value empty string. [key] must be a non-empty string")); } public void testExtractOptionalString_CreatesString() { @@ -241,11 +317,12 @@ public void testExtractOptionalString_CreatesString() { public void testExtractOptionalString_DoesNotAddException_WhenFieldDoesNotExist() { var validation = new ValidationException(); + validation.addValidationError("previous error"); Map map = modifiableMap(Map.of("key", "value")); var createdString = extractOptionalString(map, "abc", "scope", validation); assertNull(createdString); - assertTrue(validation.validationErrors().isEmpty()); + assertThat(validation.validationErrors(), hasSize(1)); assertThat(map.size(), is(1)); } @@ -260,6 +337,14 @@ public void testExtractOptionalString_AddsException_WhenFieldIsEmpty() { assertThat(validation.validationErrors().get(0), is("[scope] Invalid value empty string. 
[key] must be a non-empty string")); } + public void testExtractOptionalPositiveInt() { + var validation = new ValidationException(); + validation.addValidationError("previous error"); + Map map = modifiableMap(Map.of("abc", 1)); + assertEquals(Integer.valueOf(1), extractOptionalPositiveInteger(map, "abc", "scope", validation)); + assertThat(validation.validationErrors(), hasSize(1)); + } + public void testExtractOptionalEnum_ReturnsNull_WhenFieldDoesNotExist() { var validation = new ValidationException(); Map map = modifiableMap(Map.of("key", "value")); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java index 1ac97642f0b8..a306a3e660cd 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.inference.services.cohere.embeddings; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -247,7 +246,7 @@ public void testFromMap_InvalidEmbeddingType_ThrowsError_ForPersistent() { public void testFromMap_ReturnsFailure_WhenEmbeddingTypesAreNotValid() { var exception = expectThrows( - ElasticsearchStatusException.class, + ValidationException.class, () -> CohereEmbeddingsServiceSettings.fromMap( new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, List.of("abc"))), ConfigurationParseContext.PERSISTENT @@ -256,7 +255,7 @@ public void 
testFromMap_ReturnsFailure_WhenEmbeddingTypesAreNotValid() { MatcherAssert.assertThat( exception.getMessage(), - is("field [embedding_type] is not of the expected type. The value [[abc]] cannot be converted to a [String]") + containsString("field [embedding_type] is not of the expected type. The value [[abc]] cannot be converted to a [String]") ); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 7212edbb8cf8..ea11e9d0343e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -24,15 +24,22 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xpack.core.action.util.QueryPage; import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; import org.elasticsearch.xpack.core.inference.results.ErrorChunkedInferenceResults; +import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResultsTests; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import 
org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; +import org.junit.After; +import org.junit.Before; +import org.mockito.Mockito; import java.util.ArrayList; import java.util.Arrays; @@ -41,6 +48,7 @@ import java.util.Map; import java.util.Random; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -59,6 +67,18 @@ public class ElasticsearchInternalServiceTests extends ESTestCase { TaskType taskType = TaskType.TEXT_EMBEDDING; String randomInferenceEntityId = randomAlphaOfLength(10); + private static ThreadPool threadPool; + + @Before + public void setUpThreadPool() { + threadPool = new TestThreadPool("test"); + } + + @After + public void shutdownThreadPool() { + TestThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + } + public void testParseRequestConfig() { // Null model variant @@ -220,6 +240,95 @@ public void testParseRequestConfig() { } } + @SuppressWarnings("unchecked") + public void testParseRequestConfig_Rerank() { + // with task settings + { + var client = mock(Client.class); + doAnswer(invocation -> { + var listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse( + new GetTrainedModelsAction.Response(new QueryPage<>(List.of(mock(TrainedModelConfig.class)), 1, mock(ParseField.class))) + ); + return null; + }).when(client).execute(Mockito.same(GetTrainedModelsAction.INSTANCE), any(), any()); + + when(client.threadPool()).thenReturn(threadPool); + + var service = createService(client); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, + 1, + ElasticsearchInternalServiceSettings.NUM_THREADS, + 4, + InternalServiceSettings.MODEL_ID, + "foo" + ) + ) + ); + var 
returnDocs = randomBoolean(); + settings.put( + ModelConfigurations.TASK_SETTINGS, + new HashMap<>(Map.of(CustomElandRerankTaskSettings.RETURN_DOCUMENTS, returnDocs)) + ); + + ActionListener modelListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(CustomElandModel.class)); + assertThat(model.getTaskSettings(), instanceOf(CustomElandRerankTaskSettings.class)); + assertThat(model.getServiceSettings(), instanceOf(ElasticsearchInternalServiceSettings.class)); + assertEquals(returnDocs, ((CustomElandRerankTaskSettings) model.getTaskSettings()).returnDocuments()); + }, e -> { fail("Model parsing failed " + e.getMessage()); }); + + service.parseRequestConfig(randomInferenceEntityId, taskType, settings, Set.of(), modelListener); + } + } + + @SuppressWarnings("unchecked") + public void testParseRequestConfig_Rerank_DefaultTaskSettings() { + // with task settings + { + var client = mock(Client.class); + doAnswer(invocation -> { + var listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse( + new GetTrainedModelsAction.Response(new QueryPage<>(List.of(mock(TrainedModelConfig.class)), 1, mock(ParseField.class))) + ); + return null; + }).when(client).execute(Mockito.same(GetTrainedModelsAction.INSTANCE), any(), any()); + + when(client.threadPool()).thenReturn(threadPool); + + var service = createService(client); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, + 1, + ElasticsearchInternalServiceSettings.NUM_THREADS, + 4, + InternalServiceSettings.MODEL_ID, + "foo" + ) + ) + ); + + ActionListener modelListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(CustomElandModel.class)); + assertThat(model.getTaskSettings(), instanceOf(CustomElandRerankTaskSettings.class)); + assertThat(model.getServiceSettings(), instanceOf(ElasticsearchInternalServiceSettings.class)); + assertEquals(Boolean.TRUE, 
((CustomElandRerankTaskSettings) model.getTaskSettings()).returnDocuments()); + }, e -> { fail("Model parsing failed " + e.getMessage()); }); + + service.parseRequestConfig(randomInferenceEntityId, taskType, settings, Set.of(), modelListener); + } + } + private ActionListener getModelVerificationActionListener(MultilingualE5SmallInternalServiceSettings e5ServiceSettings) { return ActionListener.wrap(model -> { assertEquals( @@ -480,6 +589,61 @@ public void testChunkInferSetsTokenization() { } } + public void testParsePersistedConfig_Rerank() { + // with task settings + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, + 1, + ElasticsearchInternalServiceSettings.NUM_THREADS, + 4, + InternalServiceSettings.MODEL_ID, + "foo" + ) + ) + ); + settings.put(InternalServiceSettings.MODEL_ID, "foo"); + var returnDocs = randomBoolean(); + settings.put( + ModelConfigurations.TASK_SETTINGS, + new HashMap<>(Map.of(CustomElandRerankTaskSettings.RETURN_DOCUMENTS, returnDocs)) + ); + + var model = service.parsePersistedConfig(randomInferenceEntityId, TaskType.RERANK, settings); + assertThat(model.getTaskSettings(), instanceOf(CustomElandRerankTaskSettings.class)); + assertEquals(returnDocs, ((CustomElandRerankTaskSettings) model.getTaskSettings()).returnDocuments()); + } + + // without task settings + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + ElasticsearchInternalServiceSettings.NUM_ALLOCATIONS, + 1, + ElasticsearchInternalServiceSettings.NUM_THREADS, + 4, + InternalServiceSettings.MODEL_ID, + "foo" + ) + ) + ); + settings.put(InternalServiceSettings.MODEL_ID, "foo"); + + var model = service.parsePersistedConfig(randomInferenceEntityId, TaskType.RERANK, settings); + 
assertThat(model.getTaskSettings(), instanceOf(CustomElandRerankTaskSettings.class)); + assertTrue(((CustomElandRerankTaskSettings) model.getTaskSettings()).returnDocuments()); + } + } + private ElasticsearchInternalService createService(Client client) { var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client); return new ElasticsearchInternalService(context); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessor.java index 525d3adba745..c7074f8e7285 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextSimilarityProcessor.java @@ -87,7 +87,7 @@ record ResultProcessor(String question, String resultsField, TextSimilarityConfi @Override public InferenceResults processResult(TokenizationResult tokenization, PyTorchInferenceResult pyTorchResult, boolean chunkResult) { if (chunkResult) { - throw chunkingNotSupportedException(TaskType.NER); + throw chunkingNotSupportedException(TaskType.TEXT_SIMILARITY); } if (pyTorchResult.getInferenceResult().length < 1) { From d87837680a95e584c0550c482b7ab9e76e294c19 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 13 May 2024 14:24:13 +0300 Subject: [PATCH 082/119] Include field name in SyntheticFieldLoader (#108492) * Track source for arrays of objects * Update docs/changelog/108417.yaml * add missing field name * add missing field name * Include field name in SyntheticFieldLoader * revert remnant change --- .../BinaryDocValuesSyntheticFieldLoader.java | 5 +++++ .../index/mapper/DocCountFieldMapper.java | 5 +++++ .../elasticsearch/index/mapper/ObjectMapper.java | 5 +++++ ...ortedNumericDocValuesSyntheticFieldLoader.java | 5 +++++ 
.../SortedSetDocValuesSyntheticFieldLoader.java | 5 +++++ .../elasticsearch/index/mapper/SourceLoader.java | 15 +++++++++++++++ .../mapper/StringStoredFieldFieldLoader.java | 5 +++++ .../mapper/vectors/DenseVectorFieldMapper.java | 10 ++++++++++ .../analytics/mapper/HistogramFieldMapper.java | 5 +++++ .../mapper/AggregateDoubleMetricFieldMapper.java | 5 +++++ .../mapper/ConstantKeywordFieldMapper.java | 5 +++++ .../wildcard/mapper/WildcardFieldMapper.java | 5 +++++ 12 files changed, 75 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryDocValuesSyntheticFieldLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryDocValuesSyntheticFieldLoader.java index 0e6f117266e3..c3eb0c4c0290 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryDocValuesSyntheticFieldLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryDocValuesSyntheticFieldLoader.java @@ -58,4 +58,9 @@ public void write(XContentBuilder b) throws IOException { writeValue(b, values.binaryValue()); } + + @Override + public String fieldName() { + return name; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java index af341e64661d..a7283cf0a28e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocCountFieldMapper.java @@ -178,5 +178,10 @@ public void write(XContentBuilder b) throws IOException { } b.field(NAME, postings.freq()); } + + @Override + public String fieldName() { + return NAME; + } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 6d5a43ae41bd..2d432670936c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -830,6 +830,11 @@ public boolean setIgnoredValues(Map> storedFieldLoaders() { if (storedValuesName == null) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java index d8879338bea1..dea3494f408d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceLoader.java @@ -215,6 +215,11 @@ public boolean hasValue() { @Override public void write(XContentBuilder b) {} + + @Override + public String fieldName() { + return ""; + } }; /** @@ -242,10 +247,20 @@ public void write(XContentBuilder b) {} */ void write(XContentBuilder b) throws IOException; + /** + * Allows for identifying and tracking additional field values to include in the field source. + * @param objectsWithIgnoredFields maps object names to lists of fields they contain with special source handling + * @return true if any matching fields are identified + */ default boolean setIgnoredValues(Map> objectsWithIgnoredFields) { return false; } + /** + * Returns the canonical field name for this loader. + */ + String fieldName(); + /** * Sync for stored field values. 
*/ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/StringStoredFieldFieldLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/StringStoredFieldFieldLoader.java index 6ae7c5f20233..b26aed11233f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/StringStoredFieldFieldLoader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/StringStoredFieldFieldLoader.java @@ -89,4 +89,9 @@ public final void write(XContentBuilder b) throws IOException { public final DocValuesLoader docValuesLoader(LeafReader reader, int[] docIdsInLeaf) throws IOException { return null; } + + @Override + public String fieldName() { + return name; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index 5159a76206ef..9ecd68ec2780 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -1671,6 +1671,11 @@ public void write(XContentBuilder b) throws IOException { } b.endArray(); } + + @Override + public String fieldName() { + return name(); + } } private class DocValuesSyntheticFieldLoader implements SourceLoader.SyntheticFieldLoader { @@ -1721,5 +1726,10 @@ public void write(XContentBuilder b) throws IOException { } b.endArray(); } + + @Override + public String fieldName() { + return name(); + } } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java index b8e4f77f7da7..0cbe3786fc03 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java +++ 
b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java @@ -551,6 +551,11 @@ public void write(XContentBuilder b) throws IOException { b.endObject(); } + + @Override + public String fieldName() { + return name(); + } }; } } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java index 03f1aaf8577c..127ea31fa779 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/mapper/AggregateDoubleMetricFieldMapper.java @@ -705,6 +705,11 @@ protected AggregateMetricSyntheticFieldLoader(String name, String simpleName, En this.metrics = metrics; } + @Override + public String fieldName() { + return name; + } + @Override public Stream> storedFieldLoaders() { return Stream.of(); diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index ebf060f520c5..0dc37ab9e725 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -372,6 +372,11 @@ public void write(XContentBuilder b) throws IOException { b.field(simpleName(), fieldType().value); } } + + @Override + public String fieldName() { + return name(); + } }; } } diff --git 
a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index a07544ff68c9..69709d638a77 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -1061,5 +1061,10 @@ public void write(XContentBuilder b) throws IOException { } storedValues = emptyList(); } + + @Override + public String fieldName() { + return name(); + } } } From b9143339748edbf1de39feadaf5f2c13aed08369 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 13 May 2024 13:38:04 +0100 Subject: [PATCH 083/119] Use random-access reads in `randomReadAndSlice` (#108543) In #107885 we added some checks that we could use a random-access read to repeat the read we just performed. This commit improves coverage of the random-access read case by performing such reads anywhere in the input, not just on the data right behind the current pointer. 
--- .../lucene/store/ESIndexInputTestCase.java | 53 +++++++++++-------- 1 file changed, 31 insertions(+), 22 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/common/lucene/store/ESIndexInputTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/lucene/store/ESIndexInputTestCase.java index 2b4e7fd4c751..63b7dd88cb44 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/lucene/store/ESIndexInputTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/lucene/store/ESIndexInputTestCase.java @@ -74,36 +74,45 @@ protected byte[] randomReadAndSlice(IndexInput indexInput, int length) throws IO switch (readStrategy) { case 0, 1, 2, 3: if (length - readPos >= Long.BYTES && readStrategy <= 0) { - long read = indexInput.readLong(); - ByteBuffer.wrap(output, readPos, Long.BYTES).order(ByteOrder.LITTLE_ENDIAN).putLong(read); + ByteBuffer.wrap(output, readPos, Long.BYTES).order(ByteOrder.LITTLE_ENDIAN).putLong(indexInput.readLong()); readPos += Long.BYTES; - if (indexInput instanceof RandomAccessInput randomAccessInput) { - assertEquals(read, randomAccessInput.readLong(indexInput.getFilePointer() - Long.BYTES)); - indexInput.seek(readPos); - } } else if (length - readPos >= Integer.BYTES && readStrategy <= 1) { - int read = indexInput.readInt(); - ByteBuffer.wrap(output, readPos, Integer.BYTES).order(ByteOrder.LITTLE_ENDIAN).putInt(read); + ByteBuffer.wrap(output, readPos, Integer.BYTES).order(ByteOrder.LITTLE_ENDIAN).putInt(indexInput.readInt()); readPos += Integer.BYTES; - if (indexInput instanceof RandomAccessInput randomAccessInput) { - assertEquals(read, randomAccessInput.readInt(indexInput.getFilePointer() - Integer.BYTES)); - indexInput.seek(readPos); - } } else if (length - readPos >= Short.BYTES && readStrategy <= 2) { - short read = indexInput.readShort(); - ByteBuffer.wrap(output, readPos, Short.BYTES).order(ByteOrder.LITTLE_ENDIAN).putShort(read); + ByteBuffer.wrap(output, readPos, 
Short.BYTES).order(ByteOrder.LITTLE_ENDIAN).putShort(indexInput.readShort()); readPos += Short.BYTES; - if (indexInput instanceof RandomAccessInput randomAccessInput) { - assertEquals(read, randomAccessInput.readShort(indexInput.getFilePointer() - Short.BYTES)); - indexInput.seek(readPos); - } } else { - byte read = indexInput.readByte(); - output[readPos++] = read; - if (indexInput instanceof RandomAccessInput randomAccessInput) { - assertEquals(read, randomAccessInput.readByte(indexInput.getFilePointer() - 1)); + output[readPos++] = indexInput.readByte(); + } + if (indexInput instanceof RandomAccessInput randomAccessInput && randomBoolean()) { + final var randomAccessReadStart = between(0, length - 1); + final int randomAccessReadEnd; + if (length - randomAccessReadStart >= Long.BYTES && randomBoolean()) { + ByteBuffer.wrap(output, randomAccessReadStart, Long.BYTES) + .order(ByteOrder.LITTLE_ENDIAN) + .putLong(randomAccessInput.readLong(randomAccessReadStart)); + randomAccessReadEnd = randomAccessReadStart + Long.BYTES; + } else if (length - randomAccessReadStart >= Integer.BYTES && randomBoolean()) { + ByteBuffer.wrap(output, randomAccessReadStart, Integer.BYTES) + .order(ByteOrder.LITTLE_ENDIAN) + .putInt(randomAccessInput.readInt(randomAccessReadStart)); + randomAccessReadEnd = randomAccessReadStart + Integer.BYTES; + } else if (length - randomAccessReadStart >= Short.BYTES && randomBoolean()) { + ByteBuffer.wrap(output, randomAccessReadStart, Short.BYTES) + .order(ByteOrder.LITTLE_ENDIAN) + .putShort(randomAccessInput.readShort(randomAccessReadStart)); + randomAccessReadEnd = randomAccessReadStart + Short.BYTES; + } else { + output[randomAccessReadStart] = randomAccessInput.readByte(randomAccessReadStart); + randomAccessReadEnd = randomAccessReadStart + 1; + } + if (randomAccessReadStart <= readPos && readPos <= randomAccessReadEnd && randomBoolean()) { + readPos = between(readPos, randomAccessReadEnd); indexInput.seek(readPos); } + + 
indexInput.seek(readPos); // BUG these random-access reads shouldn't affect the current position } break; case 4: From 6020bc7e06f13b500738c449c2732c0abae66c08 Mon Sep 17 00:00:00 2001 From: Thomas Neirynck Date: Mon, 13 May 2024 08:42:21 -0400 Subject: [PATCH 084/119] [Docs] Add warning kibana has incomplete support for nested fields (#107971) --- docs/reference/mapping/types/nested.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc index 5d6ede6acd5a..6272f4529c5f 100644 --- a/docs/reference/mapping/types/nested.asciidoc +++ b/docs/reference/mapping/types/nested.asciidoc @@ -11,6 +11,8 @@ independently of each other. TIP: When ingesting key-value pairs with a large, arbitrary set of keys, you might consider modeling each key-value pair as its own nested document with `key` and `value` fields. Instead, consider using the <> data type, which maps an entire object as a single field and allows for simple searches over its contents. Nested documents and queries are typically expensive, so using the `flattened` data type for this use case is a better option. +WARNING: Nested fields have incomplete support in Kibana. While they are visible and searchable in Discover, they cannot be used to build visualizations in Lens. + [[nested-arrays-flattening-objects]] ==== How arrays of objects are flattened From 1a8238cf6d8e04521fe8e94d128b7d289bbfbc61 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 13 May 2024 13:44:16 +0100 Subject: [PATCH 085/119] Extract `SAFE_AWAIT_TIMEOUT` constant (#108554) Mainly so we have a place to document why we have a 10s wait in these methods. 
Co-authored-by: Mikhail Berezovskiy --- .../org/elasticsearch/test/ESTestCase.java | 29 ++++++++++++++++--- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 83f7fdfe386c..80f9f2abea18 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -2101,9 +2101,24 @@ protected static SecureRandom secureRandomFips(final byte[] seed) throws NoSuchA return secureRandomFips; } + /** + * The timeout used for the various "safe" wait methods such as {@link #safeAwait} and {@link #safeAcquire}. In tests we generally want + * these things to complete almost immediately, but sometimes the CI runner executes things rather slowly so we use {@code 10s} as a + * fairly relaxed definition of "immediately". + *

+ * A well-designed test should not need to wait for anything close to this duration when run in isolation. If you think you need to do + * so, instead seek a better way to write the test such that it does not need to wait for so long. Tests that take multiple seconds to + * complete are a big drag on CI times which slows everyone down. + *

+ * For instance, tests which verify things that require the passage of time ought to simulate this (e.g. using a {@link + * org.elasticsearch.common.util.concurrent.DeterministicTaskQueue}). Excessive busy-waits ought to be replaced by blocking waits (e.g. + * using a {@link CountDownLatch}) which release as soon as the condition is satisfied. + */ + public static final TimeValue SAFE_AWAIT_TIMEOUT = TimeValue.timeValueSeconds(10); + public static void safeAwait(CyclicBarrier barrier) { try { - barrier.await(10, TimeUnit.SECONDS); + barrier.await(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); fail(e, "safeAwait: interrupted waiting for CyclicBarrier release"); @@ -2114,7 +2129,10 @@ public static void safeAwait(CyclicBarrier barrier) { public static void safeAwait(CountDownLatch countDownLatch) { try { - assertTrue("safeAwait: CountDownLatch did not reach zero within the timeout", countDownLatch.await(10, TimeUnit.SECONDS)); + assertTrue( + "safeAwait: CountDownLatch did not reach zero within the timeout", + countDownLatch.await(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS) + ); } catch (InterruptedException e) { Thread.currentThread().interrupt(); fail(e, "safeAwait: interrupted waiting for CountDownLatch to reach zero"); @@ -2123,7 +2141,10 @@ public static void safeAwait(CountDownLatch countDownLatch) { public static void safeAcquire(Semaphore semaphore) { try { - assertTrue("safeAcquire: Semaphore did not acquire permit within the timeout", semaphore.tryAcquire(10, TimeUnit.SECONDS)); + assertTrue( + "safeAcquire: Semaphore did not acquire permit within the timeout", + semaphore.tryAcquire(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS) + ); } catch (InterruptedException e) { Thread.currentThread().interrupt(); fail(e, "safeAcquire: interrupted waiting for Semaphore to acquire permit"); @@ -2134,7 +2155,7 @@ public static T safeAwait(SubscribableListener listener) { final 
var future = new PlainActionFuture(); listener.addListener(future); try { - return future.get(10, TimeUnit.SECONDS); + return future.get(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new AssertionError("safeAwait: interrupted waiting for SubscribableListener", e); From 78358452b4f5961d4233fe9180d8fb4f20c625f9 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 13 May 2024 15:53:32 +0300 Subject: [PATCH 086/119] Add option to track array source in ObjectMapper (#108496) The new option, `store_array_source `, will be used to control when to track the source for arrays of objects within objects that have this option set. This only applies to indexes with synthetic source mode. Related to https://github.com/elastic/elasticsearch/issues/90708 --- .../index/mapper/DocumentParser.java | 10 ++++- .../index/mapper/NestedObjectMapper.java | 2 +- .../index/mapper/ObjectMapper.java | 41 ++++++++++++++++++- .../index/mapper/PassThroughObjectMapper.java | 2 +- .../index/mapper/RootObjectMapper.java | 6 ++- .../FieldAliasMapperValidationTests.java | 10 ++++- .../index/mapper/MappingLookupTests.java | 1 + .../index/mapper/ObjectMapperTests.java | 4 ++ .../index/mapper/RootObjectMapperTests.java | 2 + 9 files changed, 72 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 9476c3e719e0..08421af332fe 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -838,7 +838,15 @@ protected String contentType() { private static class NoOpObjectMapper extends ObjectMapper { NoOpObjectMapper(String name, String fullPath) { - super(name, fullPath, Explicit.IMPLICIT_TRUE, Explicit.IMPLICIT_TRUE, Dynamic.RUNTIME, 
Collections.emptyMap()); + super( + name, + fullPath, + Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_FALSE, + Dynamic.RUNTIME, + Collections.emptyMap() + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java index 5c2880a4bf76..a8955e46f0ad 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java @@ -149,7 +149,7 @@ public MapperBuilderContext createChildContext(String name, Dynamic dynamic) { String nestedTypePath, Query nestedTypeFilter ) { - super(name, fullPath, enabled, Explicit.IMPLICIT_TRUE, dynamic, mappers); + super(name, fullPath, enabled, Explicit.IMPLICIT_TRUE, Explicit.IMPLICIT_FALSE, dynamic, mappers); this.nestedTypePath = nestedTypePath; this.nestedTypeFilter = nestedTypeFilter; this.includeInParent = includeInParent; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 2d432670936c..57572cd3617f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -37,10 +37,12 @@ public class ObjectMapper extends Mapper { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ObjectMapper.class); public static final String CONTENT_TYPE = "object"; + static final String STORE_ARRAY_SOURCE_PARAM = "store_array_source"; public static class Defaults { public static final boolean ENABLED = true; public static final Explicit SUBOBJECTS = Explicit.IMPLICIT_TRUE; + public static final Explicit TRACK_ARRAY_SOURCE = Explicit.IMPLICIT_FALSE; public static final Dynamic DYNAMIC = Dynamic.TRUE; } @@ -78,6 +80,7 @@ static Dynamic getRootDynamic(MappingLookup 
mappingLookup) { public static class Builder extends Mapper.Builder { protected final Explicit subobjects; protected Explicit enabled = Explicit.IMPLICIT_TRUE; + protected Explicit trackArraySource = Defaults.TRACK_ARRAY_SOURCE; protected Dynamic dynamic; protected final List mappersBuilders = new ArrayList<>(); @@ -91,6 +94,11 @@ public Builder enabled(boolean enabled) { return this; } + public Builder trackArraySource(boolean value) { + this.trackArraySource = Explicit.explicitBoolean(value); + return this; + } + public Builder dynamic(Dynamic dynamic) { this.dynamic = dynamic; return this; @@ -182,6 +190,7 @@ public ObjectMapper build(MapperBuilderContext context) { context.buildFullName(name()), enabled, subobjects, + trackArraySource, dynamic, buildMappers(context.createChildContext(name(), dynamic)) ); @@ -242,6 +251,9 @@ protected static boolean parseObjectOrDocumentTypeProperties( } else if (fieldName.equals("enabled")) { builder.enabled(XContentMapValues.nodeBooleanValue(fieldNode, fieldName + ".enabled")); return true; + } else if (fieldName.equals(STORE_ARRAY_SOURCE_PARAM)) { + builder.trackArraySource(XContentMapValues.nodeBooleanValue(fieldNode, fieldName + ".track_array_source")); + return true; } else if (fieldName.equals("properties")) { if (fieldNode instanceof Collection && ((Collection) fieldNode).isEmpty()) { // nothing to do here, empty (to support "properties: []" case) @@ -369,6 +381,7 @@ private static void validateFieldName(String fieldName, IndexVersion indexCreate protected final Explicit enabled; protected final Explicit subobjects; + protected final Explicit trackArraySource; protected final Dynamic dynamic; protected final Map mappers; @@ -378,6 +391,7 @@ private static void validateFieldName(String fieldName, IndexVersion indexCreate String fullPath, Explicit enabled, Explicit subobjects, + Explicit trackArraySource, Dynamic dynamic, Map mappers ) { @@ -387,6 +401,7 @@ private static void validateFieldName(String fieldName, 
IndexVersion indexCreate this.fullPath = internFieldName(fullPath); this.enabled = enabled; this.subobjects = subobjects; + this.trackArraySource = trackArraySource; this.dynamic = dynamic; if (mappers == null) { this.mappers = Map.of(); @@ -412,7 +427,7 @@ public ObjectMapper.Builder newBuilder(IndexVersion indexVersionCreated) { * This is typically used in the context of a mapper merge when there's not enough budget to add the entire object. */ ObjectMapper withoutMappers() { - return new ObjectMapper(simpleName(), fullPath, enabled, subobjects, dynamic, Map.of()); + return new ObjectMapper(simpleName(), fullPath, enabled, subobjects, trackArraySource, dynamic, Map.of()); } @Override @@ -454,6 +469,10 @@ public final boolean subobjects() { return subobjects.value(); } + public final boolean trackArraySource() { + return trackArraySource.value(); + } + @Override public void validate(MappingLookup mappers) { for (Mapper mapper : this.mappers.values()) { @@ -480,6 +499,7 @@ public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContex fullPath, mergeResult.enabled, mergeResult.subObjects, + mergeResult.trackArraySource, mergeResult.dynamic, mergeResult.mappers ); @@ -488,6 +508,7 @@ public ObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeContex protected record MergeResult( Explicit enabled, Explicit subObjects, + Explicit trackArraySource, ObjectMapper.Dynamic dynamic, Map mappers ) { @@ -519,11 +540,26 @@ static MergeResult build(ObjectMapper existing, ObjectMapper mergeWithObject, Ma } else { subObjects = existing.subobjects; } + final Explicit trackArraySource; + if (mergeWithObject.trackArraySource.explicit()) { + if (reason == MergeReason.INDEX_TEMPLATE) { + trackArraySource = mergeWithObject.trackArraySource; + } else if (existing.trackArraySource != mergeWithObject.trackArraySource) { + throw new MapperException( + "the [track_array_source] parameter can't be updated for the object mapping [" + existing.name() + "]" + ); 
+ } else { + trackArraySource = existing.trackArraySource; + } + } else { + trackArraySource = existing.trackArraySource; + } MapperMergeContext objectMergeContext = existing.createChildContext(parentMergeContext, existing.simpleName()); Map mergedMappers = buildMergedMappers(existing, mergeWithObject, objectMergeContext, subObjects.value()); return new MergeResult( enabled, subObjects, + trackArraySource, mergeWithObject.dynamic != null ? mergeWithObject.dynamic : existing.dynamic, mergedMappers ); @@ -680,6 +716,9 @@ void toXContent(XContentBuilder builder, Params params, ToXContent custom) throw if (subobjects != Defaults.SUBOBJECTS) { builder.field("subobjects", subobjects.value()); } + if (trackArraySource != Defaults.TRACK_ARRAY_SOURCE) { + builder.field(STORE_ARRAY_SOURCE_PARAM, trackArraySource.value()); + } if (custom != null) { custom.toXContent(builder, params); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java index d44f03d72e21..668237571984 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java @@ -77,7 +77,7 @@ public PassThroughObjectMapper build(MapperBuilderContext context) { Explicit timeSeriesDimensionSubFields ) { // Subobjects are not currently supported. 
- super(name, fullPath, enabled, Explicit.IMPLICIT_FALSE, dynamic, mappers); + super(name, fullPath, enabled, Explicit.IMPLICIT_FALSE, Explicit.IMPLICIT_FALSE, dynamic, mappers); this.timeSeriesDimensionSubFields = timeSeriesDimensionSubFields; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index 9e0680e6e6e6..c19809760ec4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -117,6 +117,7 @@ public RootObjectMapper build(MapperBuilderContext context) { name(), enabled, subobjects, + trackArraySource, dynamic, mappers, new HashMap<>(runtimeFields), @@ -262,6 +263,7 @@ private static boolean isConflictingObject(Mapper mapper, String[] parts) { String name, Explicit enabled, Explicit subobjects, + Explicit trackArraySource, Dynamic dynamic, Map mappers, Map runtimeFields, @@ -270,7 +272,7 @@ private static boolean isConflictingObject(Mapper mapper, String[] parts) { Explicit dateDetection, Explicit numericDetection ) { - super(name, name, enabled, subobjects, dynamic, mappers); + super(name, name, enabled, subobjects, trackArraySource, dynamic, mappers); this.runtimeFields = runtimeFields; this.dynamicTemplates = dynamicTemplates; this.dynamicDateTimeFormatters = dynamicDateTimeFormatters; @@ -292,6 +294,7 @@ RootObjectMapper withoutMappers() { simpleName(), enabled, subobjects, + trackArraySource, dynamic, Map.of(), Map.of(), @@ -407,6 +410,7 @@ public RootObjectMapper merge(Mapper mergeWith, MapperMergeContext parentMergeCo simpleName(), mergeResult.enabled(), mergeResult.subObjects(), + mergeResult.trackArraySource(), mergeResult.dynamic(), mergeResult.mappers(), Map.copyOf(runtimeFields), diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java index 6df9fd1f35f5..c02df8336a66 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperValidationTests.java @@ -164,7 +164,15 @@ private static FieldMapper createFieldMapper(String parent, String name) { } private static ObjectMapper createObjectMapper(String name) { - return new ObjectMapper(name, name, Explicit.IMPLICIT_TRUE, Explicit.IMPLICIT_TRUE, ObjectMapper.Dynamic.FALSE, emptyMap()); + return new ObjectMapper( + name, + name, + Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_FALSE, + ObjectMapper.Dynamic.FALSE, + emptyMap() + ); } private static NestedObjectMapper createNestedObjectMapper(String name) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java index 0308dac5fa21..65fa4e236baf 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingLookupTests.java @@ -82,6 +82,7 @@ public void testSubfieldOverride() { "object", Explicit.EXPLICIT_TRUE, Explicit.IMPLICIT_TRUE, + Explicit.IMPLICIT_FALSE, ObjectMapper.Dynamic.TRUE, Collections.singletonMap("object.subfield", fieldMapper) ); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 154132c77292..69848e3b93f9 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -165,6 +165,7 @@ public void testMergeEnabledForIndexTemplates() throws IOException { assertNotNull(objectMapper); assertFalse(objectMapper.isEnabled()); 
assertTrue(objectMapper.subobjects()); + assertFalse(objectMapper.trackArraySource()); // Setting 'enabled' to true is allowed, and updates the mapping. update = Strings.toString( @@ -175,6 +176,7 @@ public void testMergeEnabledForIndexTemplates() throws IOException { .field("type", "object") .field("enabled", true) .field("subobjects", false) + .field(ObjectMapper.STORE_ARRAY_SOURCE_PARAM, true) .endObject() .endObject() .endObject() @@ -185,6 +187,7 @@ public void testMergeEnabledForIndexTemplates() throws IOException { assertNotNull(objectMapper); assertTrue(objectMapper.isEnabled()); assertFalse(objectMapper.subobjects()); + assertTrue(objectMapper.trackArraySource()); } public void testFieldReplacementForIndexTemplates() throws IOException { @@ -573,6 +576,7 @@ private ObjectMapper createObjectMapperWithAllParametersSet(CheckedConsumer Date: Mon, 13 May 2024 15:20:12 +0200 Subject: [PATCH 087/119] [Connector API][Docs] List supported enum values for the list request (#108557) --- .../connector/apis/list-connector-sync-jobs-api.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc index 410bec7ac38a..6d06e7e6b904 100644 --- a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc +++ b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc @@ -30,13 +30,13 @@ Returns information about all stored connector sync jobs ordered by their creati (Optional, integer) The offset from the first result to fetch. Defaults to `0`. `status`:: -(Optional, job status) The job status the fetched sync jobs need to have. +(Optional, job status) A comma-separated list of job statuses to filter the results. Available statuses include: `canceling`, `canceled`, `completed`, `error`, `in_progress`, `pending`, `suspended`. 
`connector_id`:: (Optional, string) The connector id the fetched sync jobs need to have. `job_type`:: -(Optional, job type) A comma-separated list of job types. +(Optional, job type) A comma-separated list of job types. Available job types are: `full`, `incremental` and `access_control`. [[list-connector-sync-jobs-api-example]] ==== {api-examples-title} From 1445fd2ca80788bad532793f79aa76c008099b43 Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Mon, 13 May 2024 15:37:22 +0200 Subject: [PATCH 088/119] [Inference API] Add CohereUtilsTests (#108546) --- .../request/cohere/CohereUtilsTests.java | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtilsTests.java diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtilsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtilsTests.java new file mode 100644 index 000000000000..47aff8dad65d --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereUtilsTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.request.cohere; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.is; + +public class CohereUtilsTests extends ESTestCase { + + public void testCreateRequestSourceHeader() { + var requestSourceHeader = CohereUtils.createRequestSourceHeader(); + + assertThat(requestSourceHeader.getName(), is("Request-Source")); + assertThat(requestSourceHeader.getValue(), is("unspecified:elasticsearch")); + } + +} From 6ecff09193c5f9137ccc673216689ee3f18240fb Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Mon, 13 May 2024 10:11:56 -0400 Subject: [PATCH 089/119] [Transform] Forward indexServiceSafe exceptions to listener (#108517) IndexService.indexServiceSafe can throw an IndexNotFoundException while getting the Global Checkpoints. In theory, any exception in TransportGetCheckpointNodeAction should be forwarded to the listener. Fix #108418 --- docs/changelog/108517.yaml | 6 ++ .../TransportGetCheckpointNodeAction.java | 31 +++++-- ...TransportGetCheckpointNodeActionTests.java | 84 +++++++++++++------ 3 files changed, 85 insertions(+), 36 deletions(-) create mode 100644 docs/changelog/108517.yaml diff --git a/docs/changelog/108517.yaml b/docs/changelog/108517.yaml new file mode 100644 index 000000000000..359c8302fdf6 --- /dev/null +++ b/docs/changelog/108517.yaml @@ -0,0 +1,6 @@ +pr: 108517 +summary: Forward `indexServiceSafe` exception to listener +area: Transform +type: bug +issues: + - 108418 diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java index 481fe40a764a..177f00c704c3 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java +++ 
b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java @@ -6,6 +6,8 @@ */ package org.elasticsearch.xpack.transform.action; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -34,6 +36,7 @@ public class TransportGetCheckpointNodeAction extends HandledTransportAction { + private static final Logger logger = LogManager.getLogger(TransportGetCheckpointNodeAction.class); private final IndicesService indicesService; @Inject @@ -83,17 +86,27 @@ protected static void getGlobalCheckpoints( return; } } - final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); - final IndexShard indexShard = indexService.getShard(shardId.id()); - checkpointsByIndexOfThisNode.computeIfAbsent(shardId.getIndexName(), k -> { - long[] seqNumbers = new long[indexService.getIndexSettings().getNumberOfShards()]; - Arrays.fill(seqNumbers, SequenceNumbers.UNASSIGNED_SEQ_NO); - return seqNumbers; - }); - checkpointsByIndexOfThisNode.get(shardId.getIndexName())[shardId.getId()] = indexShard.seqNoStats().getGlobalCheckpoint(); - ++numProcessedShards; + try { + final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); + final IndexShard indexShard = indexService.getShard(shardId.id()); + + checkpointsByIndexOfThisNode.computeIfAbsent(shardId.getIndexName(), k -> { + long[] seqNumbers = new long[indexService.getIndexSettings().getNumberOfShards()]; + Arrays.fill(seqNumbers, SequenceNumbers.UNASSIGNED_SEQ_NO); + return seqNumbers; + }); + checkpointsByIndexOfThisNode.get(shardId.getIndexName())[shardId.getId()] = indexShard.seqNoStats().getGlobalCheckpoint(); + ++numProcessedShards; + } catch (Exception e) { + logger.atDebug() + .withThrowable(e) + .log("Failed to get checkpoint for shard 
[{}] and index [{}]", shardId.getId(), shardId.getIndexName()); + listener.onFailure(e); + return; + } } + listener.onResponse(new Response(checkpointsByIndexOfThisNode)); } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java index 25c7f9efa799..950e593165f0 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.seqno.SeqNoStats; @@ -47,6 +48,8 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -68,35 +71,9 @@ public void setUp() throws Exception { null, (TaskManager) null ); - IndexShard indexShardA0 = mock(IndexShard.class); - when(indexShardA0.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 3_000)); - IndexShard indexShardA1 = mock(IndexShard.class); - when(indexShardA1.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 3_001)); - IndexShard indexShardB0 = mock(IndexShard.class); - when(indexShardB0.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 4_000)); - IndexShard indexShardB1 = mock(IndexShard.class); - when(indexShardB1.seqNoStats()).thenReturn(new 
SeqNoStats(3_000, 2_000, 4_001)); - Settings commonIndexSettings = Settings.builder() - .put(SETTING_VERSION_CREATED, 1_000_000) - .put(SETTING_NUMBER_OF_SHARDS, 2) - .put(SETTING_NUMBER_OF_REPLICAS, 1) - .build(); - IndexService indexServiceA = mock(IndexService.class); - when(indexServiceA.getIndexSettings()).thenReturn( - new IndexSettings(IndexMetadata.builder("my-index-A").settings(commonIndexSettings).build(), Settings.EMPTY) - ); - when(indexServiceA.getShard(0)).thenReturn(indexShardA0); - when(indexServiceA.getShard(1)).thenReturn(indexShardA1); - IndexService indexServiceB = mock(IndexService.class); - when(indexServiceB.getIndexSettings()).thenReturn( - new IndexSettings(IndexMetadata.builder("my-index-B").settings(commonIndexSettings).build(), Settings.EMPTY) - ); - when(indexServiceB.getShard(0)).thenReturn(indexShardB0); - when(indexServiceB.getShard(1)).thenReturn(indexShardB1); + indicesService = mock(IndicesService.class); when(indicesService.clusterService()).thenReturn(clusterService); - when(indicesService.indexServiceSafe(new Index("my-index-A", "A"))).thenReturn(indexServiceA); - when(indicesService.indexServiceSafe(new Index("my-index-B", "B"))).thenReturn(indexServiceB); task = new CancellableTask(123, "type", "action", "description", new TaskId("dummy-node:456"), Map.of()); clock = new FakeClock(Instant.now()); @@ -117,6 +94,7 @@ public void testGetGlobalCheckpointsWithHighTimeout() throws InterruptedExceptio } private void testGetGlobalCheckpointsSuccess(TimeValue timeout) throws InterruptedException { + mockIndexServiceResponse(); CountDownLatch latch = new CountDownLatch(1); SetOnce responseHolder = new SetOnce<>(); SetOnce exceptionHolder = new SetOnce<>(); @@ -136,7 +114,38 @@ private void testGetGlobalCheckpointsSuccess(TimeValue timeout) throws Interrupt assertThat(exceptionHolder.get(), is(nullValue())); } + private void mockIndexServiceResponse() { + IndexShard indexShardA0 = mock(IndexShard.class); + 
when(indexShardA0.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 3_000)); + IndexShard indexShardA1 = mock(IndexShard.class); + when(indexShardA1.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 3_001)); + IndexShard indexShardB0 = mock(IndexShard.class); + when(indexShardB0.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 4_000)); + IndexShard indexShardB1 = mock(IndexShard.class); + when(indexShardB1.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 4_001)); + Settings commonIndexSettings = Settings.builder() + .put(SETTING_VERSION_CREATED, 1_000_000) + .put(SETTING_NUMBER_OF_SHARDS, 2) + .put(SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + IndexService indexServiceA = mock(IndexService.class); + when(indexServiceA.getIndexSettings()).thenReturn( + new IndexSettings(IndexMetadata.builder("my-index-A").settings(commonIndexSettings).build(), Settings.EMPTY) + ); + when(indexServiceA.getShard(0)).thenReturn(indexShardA0); + when(indexServiceA.getShard(1)).thenReturn(indexShardA1); + IndexService indexServiceB = mock(IndexService.class); + when(indexServiceB.getIndexSettings()).thenReturn( + new IndexSettings(IndexMetadata.builder("my-index-B").settings(commonIndexSettings).build(), Settings.EMPTY) + ); + when(indexServiceB.getShard(0)).thenReturn(indexShardB0); + when(indexServiceB.getShard(1)).thenReturn(indexShardB1); + when(indicesService.indexServiceSafe(new Index("my-index-A", "A"))).thenReturn(indexServiceA); + when(indicesService.indexServiceSafe(new Index("my-index-B", "B"))).thenReturn(indexServiceB); + } + public void testGetGlobalCheckpointsFailureDueToTaskCancelled() throws InterruptedException { + mockIndexServiceResponse(); TaskCancelHelper.cancel(task, "due to apocalypse"); CountDownLatch latch = new CountDownLatch(1); @@ -156,6 +165,7 @@ public void testGetGlobalCheckpointsFailureDueToTaskCancelled() throws Interrupt } public void testGetGlobalCheckpointsFailureDueToTimeout() throws InterruptedException { + 
mockIndexServiceResponse(); // Move the current time past the timeout. clock.advanceTimeBy(Duration.ofSeconds(10)); @@ -184,4 +194,24 @@ public void testGetGlobalCheckpointsFailureDueToTimeout() throws InterruptedExce is(equalTo("Transform checkpointing timed out on node [dummy-node] after [5s] having processed [0] of [4] shards")) ); } + + public void testIndexNotFoundException() throws InterruptedException { + var expectedException = new IndexNotFoundException("some index"); + when(indicesService.indexServiceSafe(any())).thenThrow(expectedException); + + var exceptionHolder = new SetOnce(); + TransportGetCheckpointNodeAction.getGlobalCheckpoints( + indicesService, + task, + shards, + TimeValue.timeValueSeconds(5), + clock, + ActionListener.wrap(r -> { + fail("Test is meant to call the onFailure method."); + }, exceptionHolder::set) + ); + + assertNotNull("Listener's onFailure handler was not called.", exceptionHolder.get()); + assertThat(exceptionHolder.get(), sameInstance(expectedException)); + } } From 7ba6e7a5cc2b85e0d59c229534af3970b9ec9db2 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Mon, 13 May 2024 10:21:09 -0400 Subject: [PATCH 090/119] Remove leading is_ prefix from Enterprise geoip docs (#108518) --- docs/changelog/108518.yaml | 5 +++++ .../elasticsearch/ingest/geoip/GeoIpProcessor.java | 12 ++++++------ .../ingest/geoip/GeoIpProcessorTests.java | 12 ++++++------ 3 files changed, 17 insertions(+), 12 deletions(-) create mode 100644 docs/changelog/108518.yaml diff --git a/docs/changelog/108518.yaml b/docs/changelog/108518.yaml new file mode 100644 index 000000000000..aad823ccc89f --- /dev/null +++ b/docs/changelog/108518.yaml @@ -0,0 +1,5 @@ +pr: 108518 +summary: Remove leading is_ prefix from Enterprise geoip docs +area: Ingest Node +type: bug +issues: [] diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 
2e50cc0a9767..6898e4433579 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -483,22 +483,22 @@ private Map retrieveEnterpriseGeoData(GeoIpDatabase geoIpDatabas } } case HOSTING_PROVIDER -> { - geoData.put("is_hosting_provider", isHostingProvider); + geoData.put("hosting_provider", isHostingProvider); } case TOR_EXIT_NODE -> { - geoData.put("is_tor_exit_node", isTorExitNode); + geoData.put("tor_exit_node", isTorExitNode); } case ANONYMOUS_VPN -> { - geoData.put("is_anonymous_vpn", isAnonymousVpn); + geoData.put("anonymous_vpn", isAnonymousVpn); } case ANONYMOUS -> { - geoData.put("is_anonymous", isAnonymous); + geoData.put("anonymous", isAnonymous); } case PUBLIC_PROXY -> { - geoData.put("is_public_proxy", isPublicProxy); + geoData.put("public_proxy", isPublicProxy); } case RESIDENTIAL_PROXY -> { - geoData.put("is_residential_proxy", isResidentialProxy); + geoData.put("residential_proxy", isResidentialProxy); } } } diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index f9f79d54522d..ec77cacbdb6b 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -373,12 +373,12 @@ public void testEnterprise() throws Exception { location.put("lon", -1.25); assertThat(geoData.get("location"), equalTo(location)); assertThat(geoData.get("network"), equalTo("2.125.160.216/29")); - assertThat(geoData.get("is_hosting_provider"), equalTo(false)); - assertThat(geoData.get("is_tor_exit_node"), equalTo(false)); - assertThat(geoData.get("is_anonymous_vpn"), equalTo(false)); - assertThat(geoData.get("is_anonymous"), equalTo(false)); - 
assertThat(geoData.get("is_public_proxy"), equalTo(false)); - assertThat(geoData.get("is_residential_proxy"), equalTo(false)); + assertThat(geoData.get("hosting_provider"), equalTo(false)); + assertThat(geoData.get("tor_exit_node"), equalTo(false)); + assertThat(geoData.get("anonymous_vpn"), equalTo(false)); + assertThat(geoData.get("anonymous"), equalTo(false)); + assertThat(geoData.get("public_proxy"), equalTo(false)); + assertThat(geoData.get("residential_proxy"), equalTo(false)); } public void testAddressIsNotInTheDatabase() throws Exception { From ebbdc7a0cedd87d35f07378db256240effc8b04b Mon Sep 17 00:00:00 2001 From: Tim Grein Date: Mon, 13 May 2024 16:27:29 +0200 Subject: [PATCH 091/119] [Inference API] Refactor OpenAiChatCompletionAction only 1 document input (#108551) --- .../action/openai/OpenAiChatCompletionAction.java | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java index 5d75adedddde..e11e9d5ad8cc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionAction.java @@ -44,16 +44,17 @@ public OpenAiChatCompletionAction(Sender sender, OpenAiChatCompletionModel model @Override public void execute(InferenceInputs inferenceInputs, TimeValue timeout, ActionListener listener) { - if (inferenceInputs instanceof DocumentsOnlyInput docsOnlyInput) { - if (docsOnlyInput.getInputs().size() > 1) { - listener.onFailure(new ElasticsearchStatusException("OpenAI completions only accepts 1 input", RestStatus.BAD_REQUEST)); - return; - } - } else { + if (inferenceInputs instanceof 
DocumentsOnlyInput == false) { listener.onFailure(new ElasticsearchStatusException("Invalid inference input type", RestStatus.INTERNAL_SERVER_ERROR)); return; } + var docsOnlyInput = (DocumentsOnlyInput) inferenceInputs; + if (docsOnlyInput.getInputs().size() > 1) { + listener.onFailure(new ElasticsearchStatusException("OpenAI completions only accepts 1 input", RestStatus.BAD_REQUEST)); + return; + } + try { ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); From 05d728e3eff6f0d432bf276d27e8437833e74131 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Mon, 13 May 2024 16:34:46 +0200 Subject: [PATCH 092/119] [DOCS] Document `manage_inference` and `monitor_inference` cluster privileges (#108553) --- docs/reference/security/authorization/privileges.asciidoc | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/reference/security/authorization/privileges.asciidoc b/docs/reference/security/authorization/privileges.asciidoc index 9153b5fbdcab..be30db4d100b 100644 --- a/docs/reference/security/authorization/privileges.asciidoc +++ b/docs/reference/security/authorization/privileges.asciidoc @@ -85,6 +85,9 @@ All {Ilm} operations related to managing policies. `manage_index_templates`:: All operations on index templates. +`manage_inference`:: +All operations related to managing {infer}. + `manage_ingest_pipelines`:: All operations on ingest pipelines. @@ -192,6 +195,9 @@ node info, node and cluster stats, and pending cluster tasks. `monitor_enrich`:: All read-only operations related to managing and executing enrich policies. +`monitor_inference`:: +All read-only operations related to {infer}. + `monitor_ml`:: All read-only {ml} operations, such as getting information about {dfeeds}, jobs, model snapshots, or results. 
From b02d06c2d297080d49502ee160f33e11a252250c Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 13 May 2024 07:50:58 -0700 Subject: [PATCH 093/119] Make MockLogAppender itself Releasable (#108526) Existing uses of MockLogAppender first construct an appender, then call capturing on the instance in a try-with-resources block. This commit adds a new method, capture, which creates an appender and sets up the capture the the same time. The intent is that this will replace the existing capturing calls, but there are too many to change in one PR. --- .../transport/netty4/ESLoggingHandlerIT.java | 7 +- .../bootstrap/SpawnerNoBootstrapTests.java | 12 +-- .../cluster/allocation/ClusterRerouteIT.java | 54 ++++++------- .../elasticsearch/test/MockLogAppender.java | 80 ++++++++++++------- .../AbstractSimpleTransportTestCase.java | 3 +- 5 files changed, 88 insertions(+), 68 deletions(-) diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java index d0cef178dc92..aee0d313e4e0 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.Level; import org.elasticsearch.ESNetty4IntegTestCase; -import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.MockLogAppender; @@ -24,16 +23,14 @@ public class ESLoggingHandlerIT extends ESNetty4IntegTestCase { private MockLogAppender appender; - private Releasable appenderRelease; public void setUp() throws Exception { super.setUp(); - appender = new MockLogAppender(); - appenderRelease = appender.capturing(ESLoggingHandler.class, 
TransportLogger.class, TcpTransport.class); + appender = MockLogAppender.capture(ESLoggingHandler.class, TransportLogger.class, TcpTransport.class); } public void tearDown() throws Exception { - appenderRelease.close(); + appender.close(); super.tearDown(); } diff --git a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java index 08e3ac2cbce8..99b2728ebfa3 100644 --- a/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java +++ b/qa/no-bootstrap-tests/src/test/java/org/elasticsearch/bootstrap/SpawnerNoBootstrapTests.java @@ -206,16 +206,16 @@ private void assertControllerSpawns(final Function pluginsDir String stdoutLoggerName = "test_plugin-controller-stdout"; String stderrLoggerName = "test_plugin-controller-stderr"; - MockLogAppender appender = new MockLogAppender(); Loggers.setLevel(LogManager.getLogger(stdoutLoggerName), Level.TRACE); Loggers.setLevel(LogManager.getLogger(stderrLoggerName), Level.TRACE); CountDownLatch messagesLoggedLatch = new CountDownLatch(2); - if (expectSpawn) { - appender.addExpectation(new ExpectedStreamMessage(stdoutLoggerName, "I am alive", messagesLoggedLatch)); - appender.addExpectation(new ExpectedStreamMessage(stderrLoggerName, "I am an error", messagesLoggedLatch)); - } - try (var ignore = appender.capturing(stdoutLoggerName, stderrLoggerName)) { + try (var appender = MockLogAppender.capture(stdoutLoggerName, stderrLoggerName)) { + if (expectSpawn) { + appender.addExpectation(new ExpectedStreamMessage(stdoutLoggerName, "I am alive", messagesLoggedLatch)); + appender.addExpectation(new ExpectedStreamMessage(stderrLoggerName, "I am an error", messagesLoggedLatch)); + } + Spawner spawner = new Spawner(); spawner.spawnNativeControllers(environment); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java index 3b9d3e133b63..fcccc0051f0c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java @@ -387,17 +387,16 @@ public void testMessageLogging() { ) .get(); - MockLogAppender dryRunMockLog = new MockLogAppender(); - dryRunMockLog.addExpectation( - new MockLogAppender.UnseenEventExpectation( - "no completed message logged on dry run", - TransportClusterRerouteAction.class.getName(), - Level.INFO, - "allocated an empty primary*" - ) - ); + try (var dryRunMockLog = MockLogAppender.capture(TransportClusterRerouteAction.class)) { + dryRunMockLog.addExpectation( + new MockLogAppender.UnseenEventExpectation( + "no completed message logged on dry run", + TransportClusterRerouteAction.class.getName(), + Level.INFO, + "allocated an empty primary*" + ) + ); - try (var ignored = dryRunMockLog.capturing(TransportClusterRerouteAction.class)) { AllocationCommand dryRunAllocation = new AllocateEmptyPrimaryAllocationCommand(indexName, 0, nodeName1, true); ClusterRerouteResponse dryRunResponse = clusterAdmin().prepareReroute() .setExplain(randomBoolean()) @@ -412,24 +411,23 @@ public void testMessageLogging() { dryRunMockLog.assertAllExpectationsMatched(); } - MockLogAppender allocateMockLog = new MockLogAppender(); - allocateMockLog.addExpectation( - new MockLogAppender.SeenEventExpectation( - "message for first allocate empty primary", - TransportClusterRerouteAction.class.getName(), - Level.INFO, - "allocated an empty primary*" + nodeName1 + "*" - ) - ); - allocateMockLog.addExpectation( - new MockLogAppender.UnseenEventExpectation( - "no message for second allocate empty primary", - TransportClusterRerouteAction.class.getName(), - Level.INFO, - "allocated an empty primary*" + nodeName2 + "*" - ) - ); - try (var ignored = 
allocateMockLog.capturing(TransportClusterRerouteAction.class)) { + try (var allocateMockLog = MockLogAppender.capture(TransportClusterRerouteAction.class)) { + allocateMockLog.addExpectation( + new MockLogAppender.SeenEventExpectation( + "message for first allocate empty primary", + TransportClusterRerouteAction.class.getName(), + Level.INFO, + "allocated an empty primary*" + nodeName1 + "*" + ) + ); + allocateMockLog.addExpectation( + new MockLogAppender.UnseenEventExpectation( + "no message for second allocate empty primary", + TransportClusterRerouteAction.class.getName(), + Level.INFO, + "allocated an empty primary*" + nodeName2 + "*" + ) + ); AllocationCommand yesDecisionAllocation = new AllocateEmptyPrimaryAllocationCommand(indexName, 0, nodeName1, true); AllocationCommand noDecisionAllocation = new AllocateEmptyPrimaryAllocationCommand("noexist", 1, nodeName2, true); diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java index bc3723119afa..dd7987642c58 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java @@ -31,13 +31,35 @@ /** * Test appender that can be used to verify that certain events were logged correctly */ -public class MockLogAppender { +public class MockLogAppender implements Releasable { private static final Map> mockAppenders = new ConcurrentHashMap<>(); private static final RealMockAppender parent = new RealMockAppender(); + // TODO: this can become final once the ctor is made private + private List loggers = List.of(); private final List expectations; private volatile boolean isAlive = true; + @Override + public void close() { + isAlive = false; + for (String logger : loggers) { + mockAppenders.compute(logger, (k, v) -> { + assert v != null; + v.remove(this); + return v.isEmpty() ? 
null : v; + }); + } + // check that all expectations have been evaluated before this is released + for (WrappedLoggingExpectation expectation : expectations) { + assertThat( + "Method assertMatched() not called on LoggingExpectation instance before release: " + expectation, + expectation.assertMatchedCalled, + is(true) + ); + } + } + private static class RealMockAppender extends AbstractAppender { RealMockAppender() { @@ -71,6 +93,11 @@ public MockLogAppender() { expectations = new CopyOnWriteArrayList<>(); } + private MockLogAppender(List loggers) { + this(); + this.loggers = loggers; + } + /** * Initialize the mock log appender with the log4j system. */ @@ -267,58 +294,57 @@ public String toString() { } } + public Releasable capturing(Class... classes) { + this.loggers = Arrays.stream(classes).map(Class::getCanonicalName).toList(); + addToMockAppenders(this, loggers); + return this; + } + + public Releasable capturing(String... names) { + this.loggers = Arrays.asList(names); + addToMockAppenders(this, loggers); + return this; + } + /** * Adds the list of class loggers to this {@link MockLogAppender}. * * Stops and runs some checks on the {@link MockLogAppender} once the returned object is released. */ - public Releasable capturing(Class... classes) { - return appendToLoggers(Arrays.stream(classes).map(Class::getCanonicalName).toList()); + public static MockLogAppender capture(Class... classes) { + return create(Arrays.stream(classes).map(Class::getCanonicalName).toList()); } /** * Same as above except takes string class names of each logger. */ - public Releasable capturing(String... names) { - return appendToLoggers(Arrays.asList(names)); + public static MockLogAppender capture(String... 
names) { + return create(Arrays.asList(names)); + } + + private static MockLogAppender create(List loggers) { + MockLogAppender appender = new MockLogAppender(loggers); + addToMockAppenders(appender, loggers); + return appender; } - private Releasable appendToLoggers(List loggers) { + private static void addToMockAppenders(MockLogAppender appender, List loggers) { for (String logger : loggers) { mockAppenders.compute(logger, (k, v) -> { if (v == null) { v = new CopyOnWriteArrayList<>(); } - v.add(this); + v.add(appender); return v; }); } - return () -> { - isAlive = false; - for (String logger : loggers) { - mockAppenders.compute(logger, (k, v) -> { - assert v != null; - v.remove(this); - return v.isEmpty() ? null : v; - }); - } - // check that all expectations have been evaluated before this is released - for (WrappedLoggingExpectation expectation : expectations) { - assertThat( - "Method assertMatched() not called on LoggingExpectation instance before release: " + expectation, - expectation.assertMatchedCalled, - is(true) - ); - } - }; } /** * Executes an action and verifies expectations against the provided logger */ public static void assertThatLogger(Runnable action, Class loggerOwner, MockLogAppender.LoggingExpectation expectation) { - MockLogAppender mockAppender = new MockLogAppender(); - try (var ignored = mockAppender.capturing(loggerOwner)) { + try (var mockAppender = MockLogAppender.capture(loggerOwner)) { mockAppender.addExpectation(expectation); action.run(); mockAppender.assertAllExpectationsMatched(); diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index ee7687398cf7..89d10acb6ec4 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -1319,8 
+1319,7 @@ public void handleException(TransportException exp) {} .build() ); - MockLogAppender appender = new MockLogAppender(); - try (var ignored = appender.capturing("org.elasticsearch.transport.TransportService.tracer")) { + try (var appender = MockLogAppender.capture("org.elasticsearch.transport.TransportService.tracer")) { //////////////////////////////////////////////////////////////////////// // tests for included action type "internal:test" From 364a6f29157ee5427aa627e957bbe73962a98896 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 13 May 2024 16:52:46 +0200 Subject: [PATCH 094/119] Fix noisy logging in tests from StartupSelfGeneratedLicenseTask (#105692) This logs endlessly and at error level during some node-restarting tests, making debugging leaks and other test failures just unnecessarily painful. Lets just log debug on node shutdown here. --- .../license/StartupSelfGeneratedLicenseTask.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java index 8d7dbe77e119..accae7fee596 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/StartupSelfGeneratedLicenseTask.java @@ -8,11 +8,11 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; import 
org.elasticsearch.license.internal.TrialLicenseVersion; @@ -29,7 +29,7 @@ public class StartupSelfGeneratedLicenseTask extends ClusterStateUpdateTask { /** * Max number of nodes licensed by generated trial license */ - private int selfGeneratedLicenseMaxNodes = 1000; + private static final int selfGeneratedLicenseMaxNodes = 1000; private final Settings settings; private final Clock clock; @@ -100,7 +100,12 @@ private ClusterState updateLicenseSignature(ClusterState currentState, LicensesM @Override public void onFailure(@Nullable Exception e) { - logger.error((Supplier) () -> "unexpected failure during [" + TASK_SOURCE + "]", e); + var state = clusterService.lifecycleState(); + if (state == Lifecycle.State.STOPPED || state == Lifecycle.State.CLOSED) { + logger.debug("node shutdown during [" + TASK_SOURCE + "]", e); + } else { + logger.error("unexpected failure during [" + TASK_SOURCE + "]", e); + } } private ClusterState extendBasic(ClusterState currentState, LicensesMetadata currentLicenseMetadata) { From e352345c233fc977212d2d6e386ac346c1f9603f Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 13 May 2024 10:59:01 -0400 Subject: [PATCH 095/119] Ensure we return non-negative scores when scoring scalar dot-products (#108522) closes: https://github.com/elastic/elasticsearch/issues/108339 --- docs/changelog/108522.yaml | 5 ++ .../vec/internal/Int7DotProduct.java | 5 +- .../vec/VectorScorerFactoryTests.java | 46 +++++++++++++++++++ 3 files changed, 54 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/108522.yaml diff --git a/docs/changelog/108522.yaml b/docs/changelog/108522.yaml new file mode 100644 index 000000000000..5bc064d7995e --- /dev/null +++ b/docs/changelog/108522.yaml @@ -0,0 +1,5 @@ +pr: 108522 +summary: Ensure we return non-negative scores when scoring scalar dot-products +area: Vector Search +type: bug +issues: [] diff --git a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java 
b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java index 9b452219bd63..5231bb8e3c67 100644 --- a/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java +++ b/libs/vec/src/main21/java/org/elasticsearch/vec/internal/Int7DotProduct.java @@ -47,10 +47,11 @@ public float score(int firstOrd, int secondOrd) throws IOException { if (firstSeg != null && secondSeg != null) { int dotProduct = dotProduct7u(firstSeg, secondSeg, length); + assert dotProduct >= 0; float adjustedDistance = dotProduct * scoreCorrectionConstant + firstOffset + secondOffset; - return (1 + adjustedDistance) / 2; + return Math.max((1 + adjustedDistance) / 2, 0f); } else { - return fallbackScore(firstByteOffset, secondByteOffset); + return Math.max(fallbackScore(firstByteOffset, secondByteOffset), 0f); } } } diff --git a/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java b/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java index 246ddaeb2ebc..07d30a887c68 100644 --- a/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java +++ b/libs/vec/src/test/java/org/elasticsearch/vec/VectorScorerFactoryTests.java @@ -28,6 +28,7 @@ import static org.elasticsearch.vec.VectorSimilarityType.EUCLIDEAN; import static org.elasticsearch.vec.VectorSimilarityType.MAXIMUM_INNER_PRODUCT; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; // @com.carrotsearch.randomizedtesting.annotations.Repeat(iterations = 100) public class VectorScorerFactoryTests extends AbstractVectorTestCase { @@ -96,6 +97,51 @@ void testSimpleImpl(long maxChunkSize) throws IOException { } } + public void testNonNegativeDotProduct() throws IOException { + assumeTrue(notSupportedMsg(), supported()); + var factory = AbstractVectorTestCase.factory.get(); + + try (Directory dir = new MMapDirectory(createTempDir(getTestName()), MMapDirectory.DEFAULT_MAX_CHUNK_SIZE)) { + // keep vecs `0` so dot 
product is `0` + byte[] vec1 = new byte[32]; + byte[] vec2 = new byte[32]; + String fileName = getTestName() + "-32"; + try (IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT)) { + var negativeOffset = floatToByteArray(-5f); + byte[] bytes = concat(vec1, negativeOffset, vec2, negativeOffset); + out.writeBytes(bytes, 0, bytes.length); + } + try (IndexInput in = dir.openInput(fileName, IOContext.DEFAULT)) { + // dot product + float expected = 0f; // TODO fix in Lucene: https://github.com/apache/lucene/pull/13356 luceneScore(DOT_PRODUCT, vec1, vec2, + // 1, -5, -5); + var scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, DOT_PRODUCT, in).get(); + assertThat(scorer.score(0, 1), equalTo(expected)); + assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + // max inner product + expected = luceneScore(MAXIMUM_INNER_PRODUCT, vec1, vec2, 1, -5, -5); + scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, MAXIMUM_INNER_PRODUCT, in).get(); + assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); + assertThat(scorer.score(0, 1), equalTo(expected)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + // cosine + expected = 0f; // TODO fix in Lucene: https://github.com/apache/lucene/pull/13356 luceneScore(COSINE, vec1, vec2, 1, -5, + // -5); + scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, COSINE, in).get(); + assertThat(scorer.score(0, 1), equalTo(expected)); + assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); + assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + // euclidean + expected = luceneScore(EUCLIDEAN, vec1, vec2, 1, -5, -5); + scorer = factory.getInt7ScalarQuantizedVectorScorer(32, 2, 1, EUCLIDEAN, in).get(); + assertThat(scorer.score(0, 1), equalTo(expected)); + assertThat(scorer.score(0, 1), greaterThanOrEqualTo(0f)); + 
assertThat((new VectorScorerSupplierAdapter(scorer)).scorer(0).score(1), equalTo(expected)); + } + } + } + public void testRandom() throws IOException { assumeTrue(notSupportedMsg(), supported()); testRandom(MMapDirectory.DEFAULT_MAX_CHUNK_SIZE, BYTE_ARRAY_RANDOM_INT7_FUNC); From 0b2e5584f70ece2e157d7be47529637c505a7d4c Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 13 May 2024 16:29:53 +0100 Subject: [PATCH 096/119] Make master-node timeout less implicit (#108414) Removes the default constructors for `MasterRequest`, `MasterReadRequest` and `AcknowledgedRequest` in favour of constructors which require subclasses to specify the relevant timeouts. This will avoid bugs like #107857 which are caused by a missing `super()` call. Also deprecates and renames the default to make it clear it should not be used in new code. Relates #107984 --- ...DeleteDataStreamGlobalRetentionAction.java | 4 +- .../DeleteDataStreamLifecycleAction.java | 1 + .../GetDataStreamGlobalRetentionAction.java | 4 +- .../GetDataStreamLifecycleStatsAction.java | 4 +- .../PutDataStreamGlobalRetentionAction.java | 1 + .../ClusterAllocationExplainRequest.java | 2 + .../allocation/DesiredBalanceRequest.java | 4 +- .../TransportGetAllocationStatsAction.java | 1 + .../AddVotingConfigExclusionsRequest.java | 1 + .../ClearVotingConfigExclusionsRequest.java | 4 +- .../desirednodes/GetDesiredNodesAction.java | 4 +- .../TransportDeleteDesiredNodesAction.java | 4 +- .../UpdateDesiredNodesRequest.java | 1 + .../cluster/health/ClusterHealthRequest.java | 5 ++- .../GetFeatureUpgradeStatusRequest.java | 2 +- .../migration/PostFeatureUpgradeRequest.java | 2 +- .../PrevalidateNodeRemovalRequest.java | 1 + .../cleanup/CleanupRepositoryRequest.java | 2 + .../delete/DeleteRepositoryRequest.java | 5 ++- .../get/GetRepositoriesRequest.java | 5 ++- .../put/PutRepositoryRequest.java | 5 ++- .../verify/VerifyRepositoryRequest.java | 5 ++- .../reroute/ClusterRerouteRequest.java | 4 +- 
.../settings/ClusterGetSettingsAction.java | 4 +- .../ClusterUpdateSettingsRequest.java | 4 +- .../shards/ClusterSearchShardsRequest.java | 5 ++- .../snapshots/clone/CloneSnapshotRequest.java | 1 + .../create/CreateSnapshotRequest.java | 5 ++- .../delete/DeleteSnapshotRequest.java | 1 + .../GetSnapshottableFeaturesRequest.java | 1 + .../features/ResetFeatureStateRequest.java | 4 +- .../snapshots/get/GetSnapshotsRequest.java | 6 ++- .../get/shard/GetShardSnapshotRequest.java | 1 + .../restore/RestoreSnapshotRequest.java | 5 ++- .../status/SnapshotsStatusRequest.java | 5 ++- .../cluster/state/ClusterStateRequest.java | 4 +- .../DeleteStoredScriptRequest.java | 4 +- .../storedscripts/GetStoredScriptRequest.java | 4 +- .../storedscripts/PutStoredScriptRequest.java | 4 +- .../tasks/PendingClusterTasksRequest.java | 4 +- .../indices/alias/IndicesAliasesRequest.java | 4 +- .../indices/alias/get/GetAliasesRequest.java | 5 ++- .../indices/close/CloseIndexRequest.java | 5 ++- .../indices/create/CreateIndexRequest.java | 5 ++- .../delete/DeleteDanglingIndexRequest.java | 2 +- .../ImportDanglingIndexRequest.java | 2 +- .../indices/delete/DeleteIndexRequest.java | 6 ++- .../mapping/put/PutMappingRequest.java | 5 ++- .../admin/indices/open/OpenIndexRequest.java | 5 ++- .../readonly/AddIndexBlockRequest.java | 1 + .../indices/rollover/RolloverRequest.java | 5 ++- .../settings/get/GetSettingsRequest.java | 4 +- .../settings/put/UpdateSettingsRequest.java | 6 ++- .../shards/IndicesShardStoresRequest.java | 5 ++- .../admin/indices/shrink/ResizeRequest.java | 5 ++- .../delete/DeleteIndexTemplateRequest.java | 5 ++- ...ransportDeleteComponentTemplateAction.java | 1 + ...rtDeleteComposableIndexTemplateAction.java | 1 + .../get/GetComponentTemplateAction.java | 5 ++- .../get/GetComposableIndexTemplateAction.java | 1 + .../get/GetIndexTemplatesRequest.java | 1 + .../post/SimulateIndexTemplateRequest.java | 1 + .../template/post/SimulateTemplateAction.java | 6 ++- 
.../put/PutComponentTemplateAction.java | 1 + .../template/put/PutIndexTemplateRequest.java | 5 ++- ...sportPutComposableIndexTemplateAction.java | 1 + .../datastreams/CreateDataStreamAction.java | 2 + .../datastreams/DeleteDataStreamAction.java | 1 + .../datastreams/GetDataStreamAction.java | 2 + .../MigrateToDataStreamAction.java | 1 + .../datastreams/ModifyDataStreamsAction.java | 1 + .../datastreams/PromoteDataStreamAction.java | 1 + .../ExplainDataStreamLifecycleAction.java | 1 + .../GetDataStreamLifecycleAction.java | 2 + .../PutDataStreamLifecycleAction.java | 2 + .../action/downsample/DownsampleAction.java | 5 ++- .../action/ingest/DeletePipelineRequest.java | 1 + .../action/ingest/GetPipelineRequest.java | 1 + .../action/ingest/PutPipelineRequest.java | 1 + .../support/master/AcknowledgedRequest.java | 29 +++++++++---- .../support/master/MasterNodeReadRequest.java | 16 ++++++- .../support/master/MasterNodeRequest.java | 40 ++++++++++++++--- .../master/info/ClusterInfoRequest.java | 5 ++- .../MetadataIndexTemplateService.java | 4 +- .../CompletionPersistentTaskAction.java | 5 ++- .../RemovePersistentTaskAction.java | 5 ++- .../persistent/StartPersistentTaskAction.java | 5 ++- .../UpdatePersistentTaskStatusAction.java | 5 ++- .../reroute/ClusterRerouteRequestTests.java | 4 +- .../TransportMasterNodeActionTests.java | 4 +- .../InternalOrPrivateSettingsPlugin.java | 4 +- .../ReservedClusterStateHandlerTests.java | 5 +++ .../action/DeleteAutoscalingPolicyAction.java | 1 + .../action/GetAutoscalingCapacityAction.java | 1 + .../action/GetAutoscalingPolicyAction.java | 1 + .../action/PutAutoscalingPolicyAction.java | 1 + .../license/GetBasicStatusRequest.java | 4 +- .../license/GetTrialStatusRequest.java | 4 +- .../license/PostStartBasicRequest.java | 4 +- .../license/PostStartTrialRequest.java | 4 +- .../license/PutLicenseRequest.java | 4 +- .../protocol/xpack/XPackUsageRequest.java | 4 +- .../protocol/xpack/frozen/FreezeRequest.java | 1 + 
.../xpack/license/GetLicenseRequest.java | 4 +- .../action/MigrateToDataTiersRequest.java | 1 + .../action/SetResetModeActionRequest.java | 1 + .../ActivateAutoFollowPatternAction.java | 1 + .../xpack/core/ccr/action/CcrStatsAction.java | 4 +- .../action/DeleteAutoFollowPatternAction.java | 1 + .../core/ccr/action/FollowInfoAction.java | 4 +- .../action/GetAutoFollowPatternAction.java | 4 +- .../core/ccr/action/PauseFollowAction.java | 1 + .../action/PutAutoFollowPatternAction.java | 4 +- .../core/ccr/action/PutFollowAction.java | 4 +- .../core/ccr/action/ResumeFollowAction.java | 4 +- .../xpack/core/ccr/action/UnfollowAction.java | 1 + .../action/DeleteEnrichPolicyAction.java | 1 + .../core/enrich/action/EnrichStatsAction.java | 4 +- .../action/ExecuteEnrichPolicyAction.java | 1 + .../enrich/action/GetEnrichPolicyAction.java | 2 + .../enrich/action/PutEnrichPolicyAction.java | 1 + .../xpack/core/ilm/StartILMRequest.java | 4 +- .../xpack/core/ilm/StopILMRequest.java | 4 +- .../ilm/action/DeleteLifecycleAction.java | 5 ++- .../core/ilm/action/GetLifecycleAction.java | 2 + .../core/ilm/action/PutLifecycleRequest.java | 5 ++- .../RemoveIndexLifecyclePolicyAction.java | 5 ++- .../action/DeleteInferenceModelAction.java | 1 + .../action/GetInferenceModelAction.java | 1 + .../action/PutInferenceModelAction.java | 1 + .../CreateTrainedModelAssignmentAction.java | 1 + .../core/ml/action/DeleteCalendarAction.java | 1 + .../ml/action/DeleteCalendarEventAction.java | 1 + .../DeleteDataFrameAnalyticsAction.java | 1 + .../core/ml/action/DeleteDatafeedAction.java | 1 + .../core/ml/action/DeleteFilterAction.java | 1 + .../core/ml/action/DeleteForecastAction.java | 1 + .../xpack/core/ml/action/DeleteJobAction.java | 1 + .../ml/action/DeleteTrainedModelAction.java | 1 + .../action/DeleteTrainedModelAliasAction.java | 1 + .../DeleteTrainedModelAssignmentAction.java | 1 + .../ExplainDataFrameAnalyticsAction.java | 1 + .../ml/action/FinalizeJobExecutionAction.java | 1 + 
.../action/FlushTrainedModelCacheAction.java | 4 +- .../core/ml/action/GetDatafeedsAction.java | 1 + .../ml/action/GetDatafeedsStatsAction.java | 1 + ...etJobModelSnapshotsUpgradeStatsAction.java | 1 + .../xpack/core/ml/action/GetJobsAction.java | 1 + .../core/ml/action/GetMlAutoscalingStats.java | 2 +- .../xpack/core/ml/action/MlMemoryAction.java | 1 + .../xpack/core/ml/action/OpenJobAction.java | 2 + .../action/PutDataFrameAnalyticsAction.java | 1 + .../core/ml/action/PutDatafeedAction.java | 1 + .../xpack/core/ml/action/PutJobAction.java | 1 + .../core/ml/action/PutTrainedModelAction.java | 1 + .../ml/action/PutTrainedModelAliasAction.java | 1 + .../PutTrainedModelDefinitionPartAction.java | 1 + .../PutTrainedModelVocabularyAction.java | 1 + .../xpack/core/ml/action/ResetJobAction.java | 1 + .../ml/action/RevertModelSnapshotAction.java | 5 ++- .../core/ml/action/SetUpgradeModeAction.java | 1 + .../action/StartDataFrameAnalyticsAction.java | 5 ++- .../core/ml/action/StartDatafeedAction.java | 3 ++ .../StartTrainedModelDeploymentAction.java | 5 ++- .../UpdateDataFrameAnalyticsAction.java | 1 + .../core/ml/action/UpdateDatafeedAction.java | 1 + .../xpack/core/ml/action/UpdateJobAction.java | 1 + ...ainedModelAssignmentRoutingInfoAction.java | 1 + .../UpdateTrainedModelDeploymentAction.java | 5 ++- .../action/UpgradeJobModelSnapshotAction.java | 1 + .../GetTrainedModelPackageConfigAction.java | 2 + .../action/LoadTrainedModelPackageAction.java | 1 + .../MonitoringMigrateAlertsRequest.java | 4 +- .../rollup/action/PutRollupJobAction.java | 2 + .../MountSearchableSnapshotRequest.java | 1 + .../settings/GetSecuritySettingsAction.java | 8 +++- .../UpdateSecuritySettingsAction.java | 2 + .../action/DeleteSnapshotLifecycleAction.java | 5 ++- .../ExecuteSnapshotLifecycleAction.java | 5 ++- .../ExecuteSnapshotRetentionAction.java | 4 +- .../action/GetSnapshotLifecycleAction.java | 2 + .../action/PutSnapshotLifecycleAction.java | 5 ++- 
.../xpack/core/slm/action/StartSLMAction.java | 4 +- .../xpack/core/slm/action/StopSLMAction.java | 4 +- .../action/DeleteTransformAction.java | 2 +- .../action/PreviewTransformAction.java | 2 +- .../transform/action/PutTransformAction.java | 2 +- .../action/ResetTransformAction.java | 2 +- .../action/StartTransformAction.java | 2 +- .../action/UpgradeTransformsAction.java | 2 +- .../action/ValidateTransformAction.java | 2 +- .../actions/put/GetWatcherSettingsAction.java | 8 +++- .../put/UpdateWatcherSettingsAction.java | 2 + .../service/WatcherServiceRequest.java | 4 +- .../ElasticsearchMappingsTests.java | 4 +- .../core/ml/utils/MlIndexAndAliasTests.java | 3 +- .../deprecation/DeprecationInfoAction.java | 1 + .../DeleteAnalyticsCollectionAction.java | 1 + .../action/GetAnalyticsCollectionAction.java | 1 + .../action/PutAnalyticsCollectionAction.java | 1 + .../ilm/action/TransportMoveToStepAction.java | 5 ++- .../ilm/action/TransportRetryAction.java | 5 ++- .../integration/ModelSnapshotRetentionIT.java | 3 +- .../ml/integration/ModelSnapshotSearchIT.java | 4 +- .../AutodetectResultProcessorIT.java | 3 +- .../ml/integration/JobResultsProviderIT.java | 3 +- .../ml/integration/UnusedStatsRemoverIT.java | 4 +- .../xpack/ml/MlInitializationService.java | 2 +- .../inference/TrainedModelStatsService.java | 4 +- .../AutodetectProcessManagerTests.java | 43 +++++++++---------- .../profiling/action/GetStatusAction.java | 4 +- .../shutdown/DeleteShutdownNodeAction.java | 2 + .../shutdown/GetShutdownStatusAction.java | 1 + .../xpack/shutdown/PutShutdownNodeAction.java | 2 + 214 files changed, 549 insertions(+), 165 deletions(-) diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamGlobalRetentionAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamGlobalRetentionAction.java index a6060923bd39..e3cdd6a8c14d 100644 --- 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamGlobalRetentionAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamGlobalRetentionAction.java @@ -64,7 +64,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(dryRun); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public boolean dryRun() { return dryRun; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java index 3fe9ae0758a9..3bd100a106dd 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/DeleteDataStreamLifecycleAction.java @@ -48,6 +48,7 @@ public void writeTo(StreamOutput out) throws IOException { } public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.names = names; } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamGlobalRetentionAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamGlobalRetentionAction.java index 51eb9e7e7e94..5816823ed710 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamGlobalRetentionAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamGlobalRetentionAction.java @@ -47,7 +47,9 @@ private GetDataStreamGlobalRetentionAction() {/* no instances */} public static final class Request extends MasterNodeReadRequest { - public Request() {} + public 
Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java index a30af402a918..cc61c7fe664b 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/GetDataStreamLifecycleStatsAction.java @@ -43,7 +43,9 @@ public Request(StreamInput in) throws IOException { super(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } @Override public ActionRequestValidationException validate() { diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamGlobalRetentionAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamGlobalRetentionAction.java index 2aa5b4b4d3ac..65ca34a99da2 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamGlobalRetentionAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/action/PutDataStreamGlobalRetentionAction.java @@ -108,6 +108,7 @@ public void writeTo(StreamOutput out) throws IOException { } public Request(@Nullable TimeValue defaultRetention, @Nullable TimeValue maxRetention) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.globalRetention = new DataStreamGlobalRetention(defaultRetention, maxRetention); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java index e6de1faa1aff..91561814fea1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java @@ -49,6 +49,7 @@ public class ClusterAllocationExplainRequest extends MasterNodeRequest { - public DesiredBalanceRequest() {} + public DesiredBalanceRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public DesiredBalanceRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java index 75434ff554b9..f26921fd4726 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetAllocationStatsAction.java @@ -103,6 +103,7 @@ protected ClusterBlockException checkBlock(Request request, ClusterState state) public static class Request extends MasterNodeReadRequest { public Request(TaskId parentTaskId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); setParentTask(parentTaskId); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java index 75877cf0630f..82e4e4123e4f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/AddVotingConfigExclusionsRequest.java @@ -57,6 +57,7 @@ 
public AddVotingConfigExclusionsRequest(String... nodeNames) { * @param timeout How long to wait for the added exclusions to take effect and be removed from the voting configuration. */ public AddVotingConfigExclusionsRequest(String[] nodeIds, String[] nodeNames, TimeValue timeout) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); if (timeout.compareTo(TimeValue.ZERO) < 0) { throw new IllegalArgumentException("timeout [" + timeout + "] must be non-negative"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsRequest.java index f8f64edad297..2ddd27261db0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/configuration/ClearVotingConfigExclusionsRequest.java @@ -26,7 +26,9 @@ public class ClearVotingConfigExclusionsRequest extends MasterNodeRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java index e6e2616e6766..46e41d306cef 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/TransportDeleteDesiredNodesAction.java @@ -102,7 +102,9 @@ public ClusterState afterBatchExecution(ClusterState clusterState, boolean clust } public static class Request extends AcknowledgedRequest { - public Request() {} + public Request() { + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java index c7c2b9a290a2..3d8cdb4b405f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/UpdateDesiredNodesRequest.java @@ -48,6 +48,7 @@ public class UpdateDesiredNodesRequest extends AcknowledgedRequest nodes, boolean dryRun) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); assert historyID != null; assert nodes != null; this.historyID = historyID; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java index a94555f1dfd1..2b60e2d4a5ff 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthRequest.java @@ -37,9 +37,12 @@ public class ClusterHealthRequest extends MasterNodeReadRequest { public GetFeatureUpgradeStatusRequest() { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); } public GetFeatureUpgradeStatusRequest(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/PostFeatureUpgradeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/PostFeatureUpgradeRequest.java index ccc4a62a1138..36a90ae9afe3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/PostFeatureUpgradeRequest.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/PostFeatureUpgradeRequest.java @@ -20,7 +20,7 @@ public class PostFeatureUpgradeRequest extends MasterNodeRequest { public PostFeatureUpgradeRequest() { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); } public PostFeatureUpgradeRequest(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalRequest.java index a88fb83b2300..5bde01195e35 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/shutdown/PrevalidateNodeRemovalRequest.java @@ -34,6 +34,7 @@ public class PrevalidateNodeRemovalRequest extends MasterNodeReadRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java index 5b49a41ed947..c4e40f1b208b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequest.java @@ -55,7 +55,9 @@ public ClusterUpdateSettingsRequest(StreamInput in) throws IOException { persistentSettings = readSettingsFromStream(in); } - public ClusterUpdateSettingsRequest() {} + public ClusterUpdateSettingsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public ActionRequestValidationException validate() { diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java index 6f6253491c58..91c302c8aa7b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java @@ -31,9 +31,12 @@ public final class ClusterSearchShardsRequest extends MasterNodeReadRequest userMetadata; - public CreateSnapshotRequest() {} + public CreateSnapshotRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Constructs a new put repository request with the provided snapshot and repository names @@ -87,6 +89,7 @@ public CreateSnapshotRequest() {} * @param snapshot snapshot name */ public CreateSnapshotRequest(String repository, String snapshot) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.snapshot = snapshot; this.repository = repository; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java index b16041da66bf..67389ea3116d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotRequest.java @@ -38,6 +38,7 @@ public class DeleteSnapshotRequest extends MasterNodeRequest private boolean includeIndexNames = true; - public GetSnapshotsRequest() {} + public GetSnapshotsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Constructs a new get snapshots request with given repository names and list of snapshots @@ -85,6 +87,7 @@ public GetSnapshotsRequest() {} * @param snapshots list of snapshots */ public GetSnapshotsRequest(String[] 
repositories, String[] snapshots) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.repositories = repositories; this.snapshots = snapshots; } @@ -95,6 +98,7 @@ public GetSnapshotsRequest(String[] repositories, String[] snapshots) { * @param repositories repository names */ public GetSnapshotsRequest(String... repositories) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.repositories = repositories; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotRequest.java index d8fd55451cc6..7a7cc0c30455 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/shard/GetShardSnapshotRequest.java @@ -29,6 +29,7 @@ public class GetShardSnapshotRequest extends MasterNodeRequest repositories, ShardId shardId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); assert repositories.isEmpty() == false; assert repositories.stream().noneMatch(Objects::isNull); assert repositories.size() == 1 || repositories.stream().noneMatch(repo -> repo.equals(ALL_REPOSITORIES)); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index 8d025653d47f..73339cedb96e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -60,7 +60,9 @@ public class RestoreSnapshotRequest extends MasterNodeRequest { - public PendingClusterTasksRequest() {} + public PendingClusterTasksRequest() { + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public PendingClusterTasksRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index fac2006b6881..f223d7fb2762 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -65,7 +65,9 @@ public IndicesAliasesRequest(StreamInput in) throws IOException { origin = in.readOptionalString(); } - public IndicesAliasesRequest() {} + public IndicesAliasesRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Request to take one or more actions on one or more indexes and alias combinations. diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java index 9d10065c9c3e..09071f2e6ea3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesRequest.java @@ -34,11 +34,14 @@ public class GetAliasesRequest extends MasterNodeReadRequest private String[] originalAliases = Strings.EMPTY_ARRAY; public GetAliasesRequest(String... 
aliases) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.aliases = aliases; this.originalAliases = aliases; } - public GetAliasesRequest() {} + public GetAliasesRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * NB prior to 8.12 get-aliases was a TransportMasterNodeReadAction so for BwC we must remain able to read these requests until we no diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequest.java index 9427a5fa363b..9a722f1bce2a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexRequest.java @@ -37,12 +37,15 @@ public CloseIndexRequest(StreamInput in) throws IOException { waitForActiveShards = ActiveShardCount.readFrom(in); } - public CloseIndexRequest() {} + public CloseIndexRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new close index request for the specified index. */ public CloseIndexRequest(String... 
indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 21f187f05258..3a78738ae986 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -111,7 +111,9 @@ public CreateIndexRequest(StreamInput in) throws IOException { } } - public CreateIndexRequest() {} + public CreateIndexRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a request to create an index. @@ -129,6 +131,7 @@ public CreateIndexRequest(String index) { * @param settings the settings to apply to the index */ public CreateIndexRequest(String index, Settings settings) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.index = index; this.settings = settings; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java index b8206cba8de2..daceeece4f97 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/delete/DeleteDanglingIndexRequest.java @@ -30,7 +30,7 @@ public DeleteDanglingIndexRequest(StreamInput in) throws IOException { } public DeleteDanglingIndexRequest(String indexUUID, boolean acceptDataLoss) { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indexUUID = Objects.requireNonNull(indexUUID, "indexUUID cannot be null"); this.acceptDataLoss = acceptDataLoss; } 
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java index 66378ab9907d..be2fb1082166 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/dangling/import_index/ImportDanglingIndexRequest.java @@ -32,7 +32,7 @@ public ImportDanglingIndexRequest(StreamInput in) throws IOException { } public ImportDanglingIndexRequest(String indexUUID, boolean acceptDataLoss) { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indexUUID = Objects.requireNonNull(indexUUID, "indexUUID cannot be null"); this.acceptDataLoss = acceptDataLoss; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java index 87cfc303a289..2cb431577242 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexRequest.java @@ -48,7 +48,9 @@ public DeleteIndexRequest(StreamInput in) throws IOException { indicesOptions = IndicesOptions.readIndicesOptions(in); } - public DeleteIndexRequest() {} + public DeleteIndexRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new delete index request for the specified index. @@ -56,6 +58,7 @@ public DeleteIndexRequest() {} * @param index The index to delete. Use "_all" to delete all indices. 
*/ public DeleteIndexRequest(String index) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = new String[] { index }; } @@ -65,6 +68,7 @@ public DeleteIndexRequest(String index) { * @param indices The indices to delete. Use "_all" to delete all indices. */ public DeleteIndexRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java index edc638143863..707286801cf6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -108,13 +108,16 @@ public PutMappingRequest(StreamInput in) throws IOException { writeIndexOnly = in.readBoolean(); } - public PutMappingRequest() {} + public PutMappingRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new put mapping request against one or more indices. If nothing is set then * it will be executed against all indices. */ public PutMappingRequest(String... 
indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequest.java index fb0745eb72d1..4bb4578f2445 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexRequest.java @@ -39,12 +39,15 @@ public OpenIndexRequest(StreamInput in) throws IOException { waitForActiveShards = ActiveShardCount.readFrom(in); } - public OpenIndexRequest() {} + public OpenIndexRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new open index request for the specified index. */ public OpenIndexRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java index caf33a541e92..9331d7010a6e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java @@ -43,6 +43,7 @@ public AddIndexBlockRequest(StreamInput in) throws IOException { * Constructs a new request for the specified block and indices */ public AddIndexBlockRequest(APIBlock block, String... 
indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.block = Objects.requireNonNull(block); this.indices = Objects.requireNonNull(indices); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java index 1f582f95aea9..09f9411d5a83 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java @@ -120,9 +120,12 @@ public RolloverRequest(StreamInput in) throws IOException { } } - RolloverRequest() {} + RolloverRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public RolloverRequest(String rolloverTarget, String newIndexName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.rolloverTarget = rolloverTarget; this.newIndexName = newIndexName; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java index 96cbfc80c8d6..42ff25657998 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsRequest.java @@ -51,7 +51,9 @@ public GetSettingsRequest includeDefaults(boolean includeDefaults) { return this; } - public GetSettingsRequest() {} + public GetSettingsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public GetSettingsRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java index 7fa2e11317a4..c3e87f2f54cf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsRequest.java @@ -61,12 +61,15 @@ public UpdateSettingsRequest(StreamInput in) throws IOException { } } - public UpdateSettingsRequest() {} + public UpdateSettingsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } /** * Constructs a new request to update settings for one or more indices */ public UpdateSettingsRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } @@ -74,6 +77,7 @@ public UpdateSettingsRequest(String... indices) { * Constructs a new request to update settings for one or more indices */ public UpdateSettingsRequest(Settings settings, String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; this.settings = settings; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java index 475c9c16f149..8cf2427e91c1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java @@ -40,10 +40,13 @@ public class IndicesShardStoresRequest extends MasterNodeReadRequestindices */ public IndicesShardStoresRequest(String... 
indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.indices = indices; } - public IndicesShardStoresRequest() {} + public IndicesShardStoresRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public IndicesShardStoresRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java index c39d2e111461..ef709fc4457a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequest.java @@ -73,9 +73,12 @@ public ResizeRequest(StreamInput in) throws IOException { } } - ResizeRequest() {} + ResizeRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public ResizeRequest(String targetIndex, String sourceIndex) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.targetIndexRequest = new CreateIndexRequest(targetIndex); this.sourceIndex = sourceIndex; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java index b3f3a0a203df..3c2416200ce6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateRequest.java @@ -28,12 +28,15 @@ public DeleteIndexTemplateRequest(StreamInput in) throws IOException { name = in.readString(); } - public DeleteIndexTemplateRequest() {} + public DeleteIndexTemplateRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Constructs a new delete index request for the specified name. 
*/ public DeleteIndexTemplateRequest(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java index 593162305f2d..9ac10d782a60 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComponentTemplateAction.java @@ -109,6 +109,7 @@ public Request(StreamInput in) throws IOException { * Constructs a new delete index request for the specified name. */ public Request(String... names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Objects.requireNonNull(names, "component templates to delete must not be null"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java index f884c8404d0f..fa40a901c705 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/TransportDeleteComposableIndexTemplateAction.java @@ -108,6 +108,7 @@ public Request(StreamInput in) throws IOException { * Constructs a new delete template request for the specified name. */ public Request(String... 
names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Objects.requireNonNull(names, "templates to delete must not be null"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java index 3d5b4a73e0a5..5483097b140d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java @@ -48,9 +48,12 @@ public static class Request extends MasterNodeReadRequest { private String name; private boolean includeDefaults; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; this.includeDefaults = false; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java index aebb9cef12f4..5cb35d23c8b7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java @@ -49,6 +49,7 @@ public static class Request extends MasterNodeReadRequest { * @param name A template name or pattern, or {@code null} to retrieve all templates. 
*/ public Request(@Nullable String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); if (name != null && name.contains(",")) { throw new IllegalArgumentException("template name may not contain ','"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java index ec7ce037e651..19c89b018673 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesRequest.java @@ -25,6 +25,7 @@ public class GetIndexTemplatesRequest extends MasterNodeReadRequest { private TransportPutComposableIndexTemplateAction.Request indexTemplateRequest; private boolean includeDefaults = false; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(String templateName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); if (templateName == null) { throw new IllegalArgumentException("template name cannot be null"); } @@ -53,6 +56,7 @@ public Request(String templateName) { } public Request(TransportPutComposableIndexTemplateAction.Request indexTemplateRequest) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); if (indexTemplateRequest == null) { throw new IllegalArgumentException("index template body must be present"); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java index 56e7079ec38b..ebf1e9e74b79 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutComponentTemplateAction.java @@ 
-56,6 +56,7 @@ public Request(StreamInput in) throws IOException { * Constructs a new put component template request with the provided name. */ public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 10c9a5e7205b..6ef887847c27 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -92,12 +92,15 @@ public PutIndexTemplateRequest(StreamInput in) throws IOException { version = in.readOptionalVInt(); } - public PutIndexTemplateRequest() {} + public PutIndexTemplateRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Constructs a new put index template request with the provided name. */ public PutIndexTemplateRequest(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java index 8d259083a135..86c610946947 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutComposableIndexTemplateAction.java @@ -156,6 +156,7 @@ public Request(StreamInput in) throws IOException { * Constructs a new put index template request with the provided name. 
*/ public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java index f9e559fa16ec..40060d5e5d92 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/CreateDataStreamAction.java @@ -36,11 +36,13 @@ public static class Request extends AcknowledgedRequest implements Indi private final long startTime; public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; this.startTime = System.currentTimeMillis(); } public Request(String name, long startTime) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; this.startTime = startTime; } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java index b68a7d3fcd15..5b79eae0cebf 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/DeleteDataStreamAction.java @@ -47,6 +47,7 @@ public static class Request extends MasterNodeRequest implements Indice private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, true, true, true, false, false, true, false); public Request(String... 
names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Objects.requireNonNull(names); this.wildcardExpressionsOriginallySpecified = Arrays.stream(names).anyMatch(Regex::isSimpleMatchPattern); } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index 1517b368e21e..812da87eab10 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -58,10 +58,12 @@ public static class Request extends MasterNodeReadRequest implements In private boolean includeDefaults = false; public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = names; } public Request(String[] names, boolean includeDefaults) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = names; this.includeDefaults = includeDefaults; } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java index 3a834273e84c..226b8d44f636 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/MigrateToDataStreamAction.java @@ -35,6 +35,7 @@ public static class Request extends AcknowledgedRequest actions) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.actions = Collections.unmodifiableList(actions); } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java index 3b3e644272cb..0853d30d22de 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/datastreams/PromoteDataStreamAction.java @@ -35,6 +35,7 @@ public static class Request extends MasterNodeRequest implements In private boolean includeDefaults = false; public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = names; } public Request(String[] names, boolean includeDefaults) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = names; this.includeDefaults = includeDefaults; } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java index 8156e03b0cdd..7bb63ae27b52 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/lifecycle/PutDataStreamLifecycleAction.java @@ -95,6 +95,7 @@ public Request(String[] names, @Nullable TimeValue dataRetention) { } public Request(String[] names, DataStreamLifecycle lifecycle) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.names = names; this.lifecycle = lifecycle; } @@ -104,6 +105,7 @@ public Request(String[] names, @Nullable TimeValue dataRetention, @Nullable Bool } public Request(String[] names, @Nullable TimeValue dataRetention, @Nullable Boolean enabled, @Nullable Downsampling downsampling) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.names = names; this.lifecycle = DataStreamLifecycle.newBuilder() .dataRetention(dataRetention) diff --git a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java index e8e299c58d2e..7d2b1be79731 100644 --- a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java @@ -50,13 +50,16 @@ public Request( final TimeValue waitTimeout, final DownsampleConfig downsampleConfig ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.sourceIndex = sourceIndex; this.targetIndex = targetIndex; this.waitTimeout = waitTimeout == null ? DEFAULT_WAIT_TIMEOUT : waitTimeout; this.downsampleConfig = downsampleConfig; } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java index 3810d9587241..4ac4d63ba5de 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineRequest.java @@ -20,6 +20,7 @@ public class DeletePipelineRequest extends AcknowledgedRequest * Create a new pipeline request with the id and source along with the content type of the source */ public PutPipelineRequest(String id, BytesReference source, XContentType xContentType, Integer version) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.id = Objects.requireNonNull(id); this.source = Objects.requireNonNull(source); this.xContentType = Objects.requireNonNull(xContentType); diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java index 7e271536be9f..2bbe3d36f031 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java @@ -34,18 +34,27 @@ public abstract class AcknowledgedRequest + * For requests which originate in the 
REST layer, use {@link + * org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the timeout. + *

+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link + * TimeValue#MAX_VALUE} (or {@link TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) + * since usually we want internal requests to wait for as long as necessary to complete. + * + * @param ackTimeout specifies how long to wait for all relevant nodes to apply a cluster state update and acknowledge this to + * the elected master. */ - protected AcknowledgedRequest() { - this(DEFAULT_ACK_TIMEOUT); + protected AcknowledgedRequest(TimeValue masterNodeTimeout, TimeValue ackTimeout) { + super(masterNodeTimeout); + this.ackTimeout = Objects.requireNonNull(ackTimeout); } - /** - * @param ackTimeout specifies how long to wait for all relevant nodes to apply a cluster state update and acknowledge this to the - * elected master. - */ + @Deprecated(forRemoval = true) // just a temporary compatibility shim protected AcknowledgedRequest(TimeValue ackTimeout) { - this.ackTimeout = Objects.requireNonNull(ackTimeout); + this(MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, ackTimeout); } protected AcknowledgedRequest(StreamInput in) throws IOException { @@ -94,6 +103,8 @@ public Plain(StreamInput in) throws IOException { super(in); } - public Plain() {} + public Plain() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } } } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java index 7f4100473c42..92788f53279d 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadRequest.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; import 
java.io.IOException; @@ -20,7 +21,20 @@ public abstract class MasterNodeReadRequest + * For requests which originate in the REST layer, use {@link + * org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the timeout. + *

+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link + * TimeValue#MAX_VALUE} (or {@link TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) + * since usually we want internal requests to wait for as long as necessary to complete. + */ + protected MasterNodeReadRequest(TimeValue masterNodeTimeout) { + super(masterNodeTimeout); + } protected MasterNodeReadRequest(StreamInput in) throws IOException { super(in); diff --git a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java index 063dbb0397de..1b3dca31689e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeRequest.java @@ -21,15 +21,36 @@ */ public abstract class MasterNodeRequest> extends ActionRequest { - public static final TimeValue DEFAULT_MASTER_NODE_TIMEOUT = TimeValue.timeValueSeconds(30); - - private TimeValue masterNodeTimeout = DEFAULT_MASTER_NODE_TIMEOUT; + /** + * The default timeout for master-node requests. It's super-trappy to have such a default, because it makes it all too easy to forget + * to add a mechanism by which clients can change it. Without such a mechanism things will work fine until we encounter a large cluster + * that is struggling to process cluster state updates fast enough, and it's a disaster if we cannot extend the master-node timeout in + * those cases. We shouldn't use this any more and should work towards removing it. + *

+ * For requests which originate in the REST layer, use {@link org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the + * timeout. + *

+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link TimeValue#MAX_VALUE} (or {@link + * TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) since usually we want internal requests to wait for as long + * as necessary to complete. + * + * @deprecated all requests should specify a timeout, see #107984. + */ + @Deprecated(forRemoval = true) + public static final TimeValue TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT = TimeValue.timeValueSeconds(30); - protected MasterNodeRequest() {} + private TimeValue masterNodeTimeout; /** * @param masterNodeTimeout Specifies how long to wait when the master has not been discovered yet, or is disconnected, or is busy - * processing other tasks. The value {@link TimeValue#MINUS_ONE} means to wait forever. + * processing other tasks. The value {@link TimeValue#MINUS_ONE} means to wait forever in 8.15.0 onwards. + *

+ * For requests which originate in the REST layer, use {@link + * org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the timeout. + *

+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link + * TimeValue#MAX_VALUE} (or {@link TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) + * since usually we want internal requests to wait for as long as necessary to complete. */ protected MasterNodeRequest(TimeValue masterNodeTimeout) { this.masterNodeTimeout = Objects.requireNonNull(masterNodeTimeout); @@ -49,7 +70,14 @@ public void writeTo(StreamOutput out) throws IOException { /** * Specifies how long to wait when the master has not been discovered yet, or is disconnected, or is busy processing other tasks. The - * value {@link TimeValue#MINUS_ONE} means to wait forever. + * value {@link TimeValue#MINUS_ONE} means to wait forever in 8.15.0 onwards. + *

+ * For requests which originate in the REST layer, use {@link org.elasticsearch.rest.RestUtils#getMasterNodeTimeout} to determine the + * timeout. + *

+ * For internally-generated requests, choose an appropriate timeout. Often this will be {@link TimeValue#MAX_VALUE} (or {@link + * TimeValue#MINUS_ONE} which means an infinite timeout in 8.15.0 onwards) since usually we want internal requests to wait for as long + * as necessary to complete. */ @SuppressWarnings("unchecked") public final Request masterNodeTimeout(TimeValue timeout) { diff --git a/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java index 00384852d147..94ba504c8b17 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequest.java @@ -26,10 +26,13 @@ public abstract class ClusterInfoRequest aliases = new ArrayList<>(); - TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; + TimeValue masterTimeout = MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT; public PutRequest(String cause, String name) { this.cause = cause; @@ -1914,7 +1914,7 @@ public PutRequest version(Integer version) { public static class RemoveRequest { final String name; - TimeValue masterTimeout = MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; + TimeValue masterTimeout = MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT; public RemoveRequest(String name) { this.name = name; diff --git a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java index 44e86e056ef3..7ab682d3143e 100644 --- a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java @@ -53,7 +53,9 @@ public static class Request extends MasterNodeRequest { private String localAbortReason; - 
public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -64,6 +66,7 @@ public Request(StreamInput in) throws IOException { } public Request(String taskId, long allocationId, Exception exception, String localAbortReason) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskId = taskId; this.exception = exception; this.allocationId = allocationId; diff --git a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java index 1fbdd03dcc26..26cf0658f60b 100644 --- a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java @@ -41,7 +41,9 @@ public static class Request extends MasterNodeRequest { private String taskId; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -49,6 +51,7 @@ public Request(StreamInput in) throws IOException { } public Request(String taskId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskId = taskId; } diff --git a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java index 299891c64711..ce0e46e7b042 100644 --- a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java @@ -51,7 +51,9 @@ public static class Request extends MasterNodeRequest { private PersistentTaskParams params; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -61,6 +63,7 @@ public 
Request(StreamInput in) throws IOException { } public Request(String taskId, String taskName, PersistentTaskParams params) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskId = taskId; this.taskName = taskName; this.params = params; diff --git a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java index dcf86f85eb70..6ecefa1bbf84 100644 --- a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java @@ -45,7 +45,9 @@ public static class Request extends MasterNodeRequest { private long allocationId = -1L; private PersistentTaskState state; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -55,6 +57,7 @@ public Request(StreamInput in) throws IOException { } public Request(String taskId, long allocationId, PersistentTaskState state) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskId = taskId; this.allocationId = allocationId; this.state = state; diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java index 6098ea777d38..7ccdb5da6d73 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.admin.cluster.reroute; import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.action.support.master.MasterNodeRequest; import 
org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand; @@ -22,6 +21,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.admin.cluster.RestClusterRerouteAction; import org.elasticsearch.test.ESTestCase; @@ -202,7 +202,7 @@ private RestRequest toRestRequest(ClusterRerouteRequest original) throws IOExcep if (original.isRetryFailed() || randomBoolean()) { params.put("retry_failed", Boolean.toString(original.isRetryFailed())); } - if (false == original.masterNodeTimeout().equals(MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT) || randomBoolean()) { + if (false == original.masterNodeTimeout().equals(TimeValue.THIRTY_SECONDS) || randomBoolean()) { params.put(REST_MASTER_TIMEOUT_PARAM, original.masterNodeTimeout().toString()); } if (original.getCommands() != null) { diff --git a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index 94e0ce1ccaf1..6d24f8d2fe9e 100644 --- a/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -153,7 +153,9 @@ public static class Request extends MasterNodeRequest implements Indice private String[] indices = Strings.EMPTY_ARRAY; private final RefCounted refCounted = AbstractRefCounted.of(() -> {}); - Request() {} + Request() { + super(TimeValue.THIRTY_SECONDS); + } 
Request(StreamInput in) throws IOException { super(in); diff --git a/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java b/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java index cb57096d0274..ed9a7427f14f 100644 --- a/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java +++ b/server/src/test/java/org/elasticsearch/indices/settings/InternalOrPrivateSettingsPlugin.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.tasks.Task; @@ -69,8 +70,6 @@ public static class Request extends MasterNodeRequest { private String key; private String value; - Request() {} - Request(StreamInput in) throws IOException { super(in); index = in.readString(); @@ -79,6 +78,7 @@ public static class Request extends MasterNodeRequest { } public Request(final String index, final String key, final String value) { + super(TimeValue.THIRTY_SECONDS); this.index = index; this.key = key; this.value = value; diff --git a/server/src/test/java/org/elasticsearch/reservedstate/ReservedClusterStateHandlerTests.java b/server/src/test/java/org/elasticsearch/reservedstate/ReservedClusterStateHandlerTests.java index a0ad31c65c8b..c92b0b0bf15d 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/ReservedClusterStateHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/ReservedClusterStateHandlerTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.MasterNodeRequest; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.settings.InternalOrPrivateSettingsPlugin; 
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -43,6 +44,10 @@ public ValidRequest fromXContent(XContentParser parser) throws IOException { } static class ValidRequest extends MasterNodeRequest { + ValidRequest() { + super(TimeValue.THIRTY_SECONDS); + } + @Override public ActionRequestValidationException validate() { return null; diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java index d3be1816924f..9b44daf6dd42 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/DeleteAutoscalingPolicyAction.java @@ -34,6 +34,7 @@ public String name() { } public Request(final String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = Objects.requireNonNull(name); } diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java index 4a356f74e03f..90c2d664b421 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/GetAutoscalingCapacityAction.java @@ -39,6 +39,7 @@ public static class Request extends AcknowledgedRequest roles, final SortedMap deciders) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; this.roles = roles; this.deciders = deciders; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusRequest.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusRequest.java index 5883c36c9e2c..9e8e707db6b8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusRequest.java @@ -14,7 +14,9 @@ public class GetBasicStatusRequest extends MasterNodeReadRequest { - public GetBasicStatusRequest() {} + public GetBasicStatusRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public GetBasicStatusRequest(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java index 93a0206ac70c..cae967058fb7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusRequest.java @@ -14,7 +14,9 @@ public class GetTrialStatusRequest extends MasterNodeReadRequest { - public GetTrialStatusRequest() {} + public GetTrialStatusRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public GetTrialStatusRequest(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java index 602e521fe10e..7e9b0ebf44be 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicRequest.java @@ -16,7 +16,9 @@ public class PostStartBasicRequest extends AcknowledgedRequest { - public XPackUsageRequest() {} + public XPackUsageRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public XPackUsageRequest(StreamInput in) throws IOException { super(in); diff 
--git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/frozen/FreezeRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/frozen/FreezeRequest.java index f32fd515e781..d1d04088dcdd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/frozen/FreezeRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/frozen/FreezeRequest.java @@ -27,6 +27,7 @@ public class FreezeRequest extends AcknowledgedRequest implements private ActiveShardCount waitForActiveShards = ActiveShardCount.DEFAULT; public FreezeRequest(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java index e96c6a7632ec..ea4e53aced5f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/license/GetLicenseRequest.java @@ -14,7 +14,9 @@ public class GetLicenseRequest extends MasterNodeReadRequest { - public GetLicenseRequest() {} + public GetLicenseRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public GetLicenseRequest(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java index e6b087c97cdb..6584dcc279e8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/cluster/action/MigrateToDataTiersRequest.java @@ -53,6 +53,7 @@ public static MigrateToDataTiersRequest 
parse(XContentParser parser) throws IOEx } public MigrateToDataTiersRequest(@Nullable String legacyTemplateToDelete, @Nullable String nodeAttributeName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.legacyTemplateToDelete = legacyTemplateToDelete; this.nodeAttributeName = nodeAttributeName; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java index 3d46b2dd5070..6270c27ac463 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/SetResetModeActionRequest.java @@ -44,6 +44,7 @@ public static SetResetModeActionRequest disabled(boolean deleteMetadata) { } SetResetModeActionRequest(boolean enabled, Boolean deleteMetadata) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.enabled = enabled; this.deleteMetadata = deleteMetadata != null && deleteMetadata; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java index 300d2844b7a2..df917b4e97b7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ActivateAutoFollowPatternAction.java @@ -34,6 +34,7 @@ public static class Request extends AcknowledgedRequest { private final boolean active; public Request(final String name, final boolean active) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; this.active = active; } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java index b12f7bf2dc06..b187e5e39dd3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/CcrStatsAction.java @@ -45,7 +45,9 @@ public Request(StreamInput in) throws IOException { } } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } @Override public ActionRequestValidationException validate() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java index 8e7e9f860524..e38a1cfd4a2c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/DeleteAutoFollowPatternAction.java @@ -32,6 +32,7 @@ public static class Request extends AcknowledgedRequest { private final String name; public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.name = name; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java index c405e4e81ff1..d979a4cf44b9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/FollowInfoAction.java @@ -41,7 +41,9 @@ public static class Request extends MasterNodeReadRequest { private String[] followerIndices; - public Request() {} + public Request() { + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public String[] getFollowerIndices() { return followerIndices; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java index 70f4f256c87e..bd6ab5bb5af4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java @@ -34,7 +34,9 @@ public static class Request extends MasterNodeReadRequest { private String name; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java index 7ad8e5881e44..c6905b2d06a3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PauseFollowAction.java @@ -31,6 +31,7 @@ public static class Request extends MasterNodeRequest { private final String followIndex; public Request(String followIndex) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.followIndex = Objects.requireNonNull(followIndex, "followIndex"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java index 92902aa9962a..333171d864c4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java @@ -85,7 +85,9 @@ public static Request fromXContent(XContentParser parser, String name) throws IO private FollowParameters parameters = new FollowParameters(); private List leaderIndexExclusionPatterns = Collections.emptyList(); - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public ActionRequestValidationException validate() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java index 6570fb66a275..db1e84aca9cd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutFollowAction.java @@ -85,7 +85,9 @@ public static Request fromXContent(final XContentParser parser) throws IOExcepti private FollowParameters parameters = new FollowParameters(); private ActiveShardCount waitForActiveShards = ActiveShardCount.NONE; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getFollowerIndex() { return followerIndex; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java index 4cd84733b19e..12ddea8d9957 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/ResumeFollowAction.java @@ -54,7 +54,9 @@ public static Request fromXContent(final XContentParser parser, final String fol private String followerIndex; private FollowParameters parameters = 
new FollowParameters(); - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public String getFollowerIndex() { return followerIndex; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java index 808df5f8bccb..9a5f011f39a1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/UnfollowAction.java @@ -34,6 +34,7 @@ public static class Request extends AcknowledgedRequest implements Indi private final String followerIndex; public Request(String followerIndex) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.followerIndex = followerIndex; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java index e44423229110..82f98176838e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/DeleteEnrichPolicyAction.java @@ -30,6 +30,7 @@ public static class Request extends MasterNodeRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java index 779ea535f74d..5d629365a809 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/ExecuteEnrichPolicyAction.java @@ -34,6 +34,7 @@ public static class Request extends MasterNodeRequest { private boolean waitForCompletion; public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = Objects.requireNonNull(name, "name cannot be null"); this.waitForCompletion = true; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java index ef8229b407b5..37851a3641eb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/GetEnrichPolicyAction.java @@ -39,10 +39,12 @@ public static class Request extends MasterNodeReadRequest { private final List names; public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = new ArrayList<>(); } public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Arrays.asList(names); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java index 4ebbb7523987..d1031828e052 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/action/PutEnrichPolicyAction.java @@ -37,6 +37,7 @@ public static class Request extends MasterNodeRequest { private String policyName; public Request(String policyName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, 
DEFAULT_ACK_TIMEOUT); this.policyName = policyName; } @@ -42,7 +43,9 @@ public Request(StreamInput in) throws IOException { policyName = in.readString(); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getPolicyName() { return policyName; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java index 41b29365b886..d359498f3362 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/GetLifecycleAction.java @@ -104,6 +104,7 @@ public static class Request extends AcknowledgedRequest { private final String[] policyNames; public Request(String... policyNames) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); if (policyNames == null) { throw new IllegalArgumentException("ids cannot be null"); } @@ -116,6 +117,7 @@ public Request(StreamInput in) throws IOException { } public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); policyNames = Strings.EMPTY_ARRAY; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequest.java index fe6754b735ef..ebaaf4224625 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/action/PutLifecycleRequest.java @@ -38,6 +38,7 @@ public class PutLifecycleRequest extends AcknowledgedRequest { private final XContentType contentType; public Request(TaskType taskType, String inferenceEntityId, BytesReference content, XContentType contentType) { + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.taskType = taskType; this.inferenceEntityId = inferenceEntityId; this.content = content; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java index 23fed34d6889..9b383b2652af 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CreateTrainedModelAssignmentAction.java @@ -36,6 +36,7 @@ public static class Request extends MasterNodeRequest { private final StartTrainedModelDeploymentAction.TaskParams taskParams; public Request(StartTrainedModelDeploymentAction.TaskParams taskParams) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.taskParams = ExceptionsHelper.requireNonNull(taskParams, "taskParams"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java index 5c5e02559b1d..40560f11b503 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java @@ -36,6 +36,7 @@ public Request(StreamInput in) throws IOException { } public Request(String calendarId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.calendarId = ExceptionsHelper.requireNonNull(calendarId, Calendar.ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java index 
7d37dc871638..efd35a3ba87f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java @@ -38,6 +38,7 @@ public Request(StreamInput in) throws IOException { } public Request(String calendarId, String eventId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.calendarId = ExceptionsHelper.requireNonNull(calendarId, Calendar.ID.getPreferredName()); this.eventId = ExceptionsHelper.requireNonNull(eventId, ScheduledEvent.EVENT_ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java index 48323692b791..82d6c3627353 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDataFrameAnalyticsAction.java @@ -48,6 +48,7 @@ public Request(StreamInput in) throws IOException { } public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); ackTimeout(DEFAULT_TIMEOUT); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java index 2681fadf8fc5..f25be9cd164a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java @@ -37,6 +37,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private boolean force; public Request(String datafeedId) { + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java index 50cec50b2e25..782c7fa4a4db 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java @@ -38,6 +38,7 @@ public Request(StreamInput in) throws IOException { } public Request(String filterId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.filterId = ExceptionsHelper.requireNonNull(filterId, FILTER_ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java index f3e888ef9599..5bf6a8e38e18 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteForecastAction.java @@ -40,6 +40,7 @@ public Request(StreamInput in) throws IOException { } public Request(String jobId, String forecastId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); this.forecastId = ExceptionsHelper.requireNonNull(forecastId, ForecastRequestStats.FORECAST_ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java index 58b67e57acf2..99b045d19bdd 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java @@ -44,6 +44,7 @@ public static class Request extends AcknowledgedRequest { private boolean deleteUserAnnotations; public Request(String jobId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java index 9cd19eab449a..d76c4e2db064 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAction.java @@ -48,6 +48,7 @@ public Request(StreamInput in) throws IOException { } public Request(String id) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.id = ExceptionsHelper.requireNonNull(id, TrainedModelConfig.MODEL_ID); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java index 507060b1e51a..27e895df5d41 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAliasAction.java @@ -35,6 +35,7 @@ public static class Request extends AcknowledgedRequest { private final String modelId; public Request(String modelAlias, String modelId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.modelAlias = 
ExceptionsHelper.requireNonNull(modelAlias, MODEL_ALIAS); this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java index 04f1b3ddb2e2..9254d9ecc142 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteTrainedModelAssignmentAction.java @@ -30,6 +30,7 @@ public static class Request extends MasterNodeRequest { private final String modelId; public Request(String modelId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.modelId = ExceptionsHelper.requireNonNull(modelId, "model_id"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ExplainDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ExplainDataFrameAnalyticsAction.java index 64b042b61c2b..305ed8c4fc60 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ExplainDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ExplainDataFrameAnalyticsAction.java @@ -60,6 +60,7 @@ public Request(StreamInput in) throws IOException { } public Request(DataFrameAnalyticsConfig config) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.config = config; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java index b270c4506ba4..8fb1f3a91ab8 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java @@ -29,6 +29,7 @@ public static class Request extends MasterNodeRequest { private String[] jobIds; public Request(String[] jobIds) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.jobIds = jobIds; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushTrainedModelCacheAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushTrainedModelCacheAction.java index bdba626676b2..c24fc159769e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushTrainedModelCacheAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushTrainedModelCacheAction.java @@ -27,11 +27,11 @@ private FlushTrainedModelCacheAction() { public static class Request extends AcknowledgedRequest { public Request() { - super(); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); } Request(TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); } public Request(StreamInput in) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java index 1bd266c68a65..e509b84b06ae 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java @@ -50,6 +50,7 @@ public Request(String datafeedId) { } public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); local(true); } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java index 1a63eda0d687..fafb9afa99f8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java @@ -70,6 +70,7 @@ public static class Request extends MasterNodeReadRequest { private boolean allowNoMatch = true; public Request(String datafeedId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.datafeedId = ExceptionsHelper.requireNonNull(datafeedId, DatafeedConfig.ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java index e5542593df4e..ec49603c89cb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobModelSnapshotsUpgradeStatsAction.java @@ -61,6 +61,7 @@ public static class Request extends MasterNodeReadRequest { public Request(TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); } public Request(StreamInput in) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java index e8b345b3c3ff..4664dbe8f7bc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java @@ -68,6 +68,7 
@@ public static class Request extends AcknowledgedRequest { private final String nodeId; public Request(String nodeId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.nodeId = ExceptionsHelper.requireNonNull(nodeId, "nodeId"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java index b6f852605db9..cf17a828930c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java @@ -55,10 +55,12 @@ public static Request parseRequest(String jobId, XContentParser parser) { private JobParams jobParams; public Request(JobParams jobParams) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.jobParams = Objects.requireNonNull(jobParams); } public Request(String jobId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.jobParams = new JobParams(jobId); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java index fe26cdb0377f..82db002e4204 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDataFrameAnalyticsAction.java @@ -62,6 +62,7 @@ public Request(StreamInput in) throws IOException { } public Request(DataFrameAnalyticsConfig config) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.config = config; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java index 12e9b4f2967d..f79d2af49f53 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java @@ -43,6 +43,7 @@ public static Request parseRequest(String datafeedId, IndicesOptions indicesOpti private final DatafeedConfig datafeed; public Request(DatafeedConfig datafeed) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.datafeed = datafeed; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java index 9d8fca699df2..60d7f0008c0d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java @@ -51,6 +51,7 @@ public static Request parseRequest(String jobId, XContentParser parser, IndicesO public Request(Job.Builder jobBuilder) { // Validate the jobBuilder immediately so that errors can be detected prior to transportation. + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); jobBuilder.validateInputFields(); // Validate that detector configs are unique. 
// This validation logically belongs to validateInputFields call but we perform it only for PUT action to avoid BWC issues which diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java index 2e5a47536951..25d32d19aef8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAction.java @@ -75,6 +75,7 @@ public Request(TrainedModelConfig config, boolean deferDefinitionDecompression) } public Request(TrainedModelConfig config, boolean deferDefinitionDecompression, boolean waitForCompletion) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.config = config; this.deferDefinitionDecompression = deferDefinitionDecompression; this.waitForCompletion = waitForCompletion; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java index 9f0b5880f5c5..3ba91390f10d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelAliasAction.java @@ -48,6 +48,7 @@ public static class Request extends AcknowledgedRequest { private final boolean reassign; public Request(String modelAlias, String modelId, boolean reassign) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.modelAlias = ExceptionsHelper.requireNonNull(modelAlias, MODEL_ALIAS); this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID); this.reassign = reassign; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java index b7fcb98426cc..a588f7442699 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java @@ -76,6 +76,7 @@ public Request( int totalParts, boolean allowOverwriting ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID); this.definition = ExceptionsHelper.requireNonNull(definition, DEFINITION); this.part = part; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java index 1abae7be9501..106f37a37889 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java @@ -70,6 +70,7 @@ public Request( @Nullable List scores, boolean allowOverwriting ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.modelId = ExceptionsHelper.requireNonNull(modelId, TrainedModelConfig.MODEL_ID); this.vocabulary = ExceptionsHelper.requireNonNull(vocabulary, VOCABULARY); this.merges = Optional.ofNullable(merges).orElse(List.of()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java index bc74f16eea0e..548fd80da73d 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ResetJobAction.java @@ -57,6 +57,7 @@ public static class Request extends AcknowledgedRequest { private boolean deleteUserAnnotations; public Request(String jobId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java index eb975133e71e..0dd6fd8b5966 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java @@ -63,7 +63,9 @@ public static Request parseRequest(String jobId, String snapshotId, XContentPars private boolean deleteInterveningResults; private boolean force; - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); @@ -74,6 +76,7 @@ public Request(StreamInput in) throws IOException { } public Request(String jobId, String snapshotId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName()); this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, SNAPSHOT_ID.getPreferredName()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java index 9a1574bd2b03..821caf001f3e 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/SetUpgradeModeAction.java @@ -43,6 +43,7 @@ public static class Request extends AcknowledgedRequest implements ToXC } public Request(boolean enabled) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.enabled = enabled; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java index 67abda2b3eb6..00e6a546be5a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDataFrameAnalyticsAction.java @@ -72,6 +72,7 @@ public static Request parseRequest(String id, XContentParser parser) { private TimeValue timeout = DEFAULT_TIMEOUT; public Request(String id) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); setId(id); } @@ -81,7 +82,9 @@ public Request(StreamInput in) throws IOException { timeout = in.readTimeValue(); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public final void setId(String id) { this.id = ExceptionsHelper.requireNonNull(id, DataFrameAnalyticsConfig.ID); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java index 18763a78fa45..deeed6df8706 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java @@ -66,14 +66,17 @@ public static Request parseRequest(String datafeedId, 
XContentParser parser) { private DatafeedParams params; public Request(String datafeedId, long startTime) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.params = new DatafeedParams(datafeedId, startTime); } public Request(String datafeedId, String startTime) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.params = new DatafeedParams(datafeedId, startTime); } public Request(DatafeedParams params) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.params = params; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java index 8d9da97538e1..b3cf9f16c3c8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java @@ -140,9 +140,12 @@ public static Request parseRequest(String modelId, String deploymentId, XContent private int queueCapacity = 1024; private Priority priority = Priority.NORMAL; - private Request() {} + private Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public Request(String modelId, String deploymentId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); setModelId(modelId); setDeploymentId(deploymentId); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsAction.java index d23f222b9687..513a4d7b2ea8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDataFrameAnalyticsAction.java @@ -57,6 +57,7 @@ public 
Request(StreamInput in) throws IOException { } public Request(DataFrameAnalyticsConfigUpdate update) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.update = update; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java index 694ca39d9cd4..0757f1f1dc7e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java @@ -43,6 +43,7 @@ public static Request parseRequest(String datafeedId, @Nullable IndicesOptions i private DatafeedUpdate update; public Request(DatafeedUpdate update) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.update = update; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java index 15cd272d12b8..33856bfcefbb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java @@ -46,6 +46,7 @@ public Request(String jobId, JobUpdate update) { } private Request(String jobId, JobUpdate update, boolean isInternal) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.jobId = jobId; this.update = update; this.isInternal = isInternal; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelAssignmentRoutingInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelAssignmentRoutingInfoAction.java index 5cd55a201c45..fd1b179da891 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelAssignmentRoutingInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelAssignmentRoutingInfoAction.java @@ -33,6 +33,7 @@ public static class Request extends MasterNodeRequest { private final RoutingInfoUpdate update; public Request(String nodeId, String deploymentId, RoutingInfoUpdate update) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.nodeId = ExceptionsHelper.requireNonNull(nodeId, "node_id"); this.deploymentId = ExceptionsHelper.requireNonNull(deploymentId, "deployment_id"); this.update = ExceptionsHelper.requireNonNull(update, "update"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java index bb113a9b3e1e..62a7d84c60a6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateTrainedModelDeploymentAction.java @@ -64,9 +64,12 @@ public static Request parseRequest(String deploymentId, XContentParser parser) { private String deploymentId; private int numberOfAllocations; - private Request() {} + private Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(String deploymentId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); setDeploymentId(deploymentId); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java index 7fbcffa47615..abe481c926fd 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpgradeJobModelSnapshotAction.java @@ -71,6 +71,7 @@ public static UpgradeJobModelSnapshotAction.Request parseRequest(XContentParser } public Request(String jobId, String snapshotId, TimeValue timeValue, boolean waitForCompletion) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.jobId = ExceptionsHelper.requireNonNull(jobId, Job.ID); this.snapshotId = ExceptionsHelper.requireNonNull(snapshotId, SNAPSHOT_ID); this.timeout = timeValue == null ? DEFAULT_TIMEOUT : timeValue; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java index 8fcc977e3fae..ea67dfdfb185 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/packageloader/action/GetTrainedModelPackageConfigAction.java @@ -37,10 +37,12 @@ public static class Request extends MasterNodeRequest { - public MonitoringMigrateAlertsRequest() {} + public MonitoringMigrateAlertsRequest() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } public MonitoringMigrateAlertsRequest(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java index 06a6b4c2a072..7f1e81164a51 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java @@ -38,6 +38,7 @@ public static class Request extends AcknowledgedRequest implements Indi private IndicesOptions indicesOptions = IndicesOptions.fromOptions(false, false, true, false); public Request(RollupJobConfig config) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.config = config; } @@ -48,6 +49,7 @@ public Request(StreamInput in) throws IOException { public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); } public static Request fromXContent(final XContentParser parser, final String id) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java index 3cb7b5b07fc1..fba742e28803 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/searchablesnapshots/MountSearchableSnapshotRequest.java @@ -101,6 +101,7 @@ public MountSearchableSnapshotRequest( boolean waitForCompletion, Storage storage ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.mountedIndexName = Objects.requireNonNull(mountedIndexName); this.repositoryName = Objects.requireNonNull(repositoryName); this.snapshotName = Objects.requireNonNull(snapshotName); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java index bc8d81cd268a..7623a7f65af3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/GetSecuritySettingsAction.java @@ -34,9 +34,13 @@ public GetSecuritySettingsAction() { public static class Request extends MasterNodeReadRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } - public Request(StreamInput in) throws IOException {} + public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } @Override public void writeTo(StreamOutput out) throws IOException {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java index 20feb0faf503..3cce133749e4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/settings/UpdateSecuritySettingsAction.java @@ -72,12 +72,14 @@ public Request( Map tokensIndexSettings, Map profilesIndexSettings ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.mainIndexSettings = Objects.requireNonNullElse(mainIndexSettings, Collections.emptyMap()); this.tokensIndexSettings = Objects.requireNonNullElse(tokensIndexSettings, Collections.emptyMap()); this.profilesIndexSettings = Objects.requireNonNullElse(profilesIndexSettings, Collections.emptyMap()); } public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.mainIndexSettings = in.readGenericMap(); this.tokensIndexSettings = in.readGenericMap(); this.profilesIndexSettings = in.readGenericMap(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java index 17a23f6b66b5..6e083295b086 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/DeleteSnapshotLifecycleAction.java @@ -33,9 +33,12 @@ public Request(StreamInput in) throws IOException { lifecycleId = in.readString(); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(String lifecycleId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.lifecycleId = Objects.requireNonNull(lifecycleId, "id may not be null"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java index 8a8ecf3a747a..442ff6b2bfb6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotLifecycleAction.java @@ -36,6 +36,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private String lifecycleId; public Request(String lifecycleId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.lifecycleId = lifecycleId; } @@ -44,7 +45,9 @@ public Request(StreamInput in) throws IOException { lifecycleId = in.readString(); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getLifecycleId() { return this.lifecycleId; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java index 9574ba7fff68..e4d698f48d25 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java @@ -26,7 +26,9 @@ protected ExecuteSnapshotRetentionAction() { public static class Request extends AcknowledgedRequest implements ToXContentObject { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public Request(StreamInput in) throws IOException { super(in); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleAction.java index d556c0fda5e7..ad62b155da41 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/GetSnapshotLifecycleAction.java @@ -35,6 +35,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private SnapshotLifecyclePolicy lifecycle; public Request(String lifecycleId, SnapshotLifecyclePolicy lifecycle) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.lifecycleId = lifecycleId; this.lifecycle = lifecycle; } @@ -46,7 +47,9 @@ public Request(StreamInput in) throws IOException { lifecycle = new SnapshotLifecyclePolicy(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getLifecycleId() { return this.lifecycleId; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java index d6deb7bda384..666701ac1f88 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StartSLMAction.java @@ -28,7 +28,9 @@ public Request(StreamInput in) throws IOException { super(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public int hashCode() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java index 60be1b99cde8..4aae048b5e5b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/StopSLMAction.java @@ -28,7 +28,9 @@ public Request(StreamInput in) throws IOException { super(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public int hashCode() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java index 3623c659216d..79ae38745934 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/DeleteTransformAction.java @@ -34,7 +34,7 @@ public static class Request extends AcknowledgedRequest { private final boolean deleteDestIndex; public Request(String id, boolean force, boolean deleteDestIndex, TimeValue timeout) { - super(timeout); + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName()); this.force = force; this.deleteDestIndex = deleteDestIndex; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java index f06ba16d9da7..adebbba651f1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PreviewTransformAction.java @@ -58,7 +58,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private final TransformConfig config; public Request(TransformConfig config, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.config = config; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java index 9d335b2ccdb3..496e82665157 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/PutTransformAction.java @@ -57,7 +57,7 @@ public static class Request extends AcknowledgedRequest { private final boolean deferValidation; public Request(TransformConfig config, boolean deferValidation, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.config = config; this.deferValidation = deferValidation; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java index 609dd33cbfa9..5840e107c1d1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ResetTransformAction.java @@ -34,7 +34,7 @@ public static class Request extends AcknowledgedRequest { private final boolean force; public Request(String id, boolean force, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName()); this.force = force; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java index 3ecadd1b708c..838a0650c8af 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/StartTransformAction.java @@ -39,7 +39,7 @@ public static class Request extends AcknowledgedRequest { private final Instant from; public Request(String id, Instant from, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.id = ExceptionsHelper.requireNonNull(id, TransformField.ID.getPreferredName()); this.from = from; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java index 3a36d9163e0c..cdc0a53b6f0a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/UpgradeTransformsAction.java @@ -40,7 +40,7 @@ public Request(StreamInput in) throws IOException { } public Request(boolean dryRun, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.dryRun = dryRun; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java index de6435ad31db..55c21b91b11d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/ValidateTransformAction.java @@ -36,7 +36,7 @@ public static class Request extends AcknowledgedRequest { private final boolean deferValidation; public Request(TransformConfig config, boolean deferValidation, TimeValue timeout) { - super(timeout); + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); this.config = config; this.deferValidation = deferValidation; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java index 576bd220853c..902c6db07dc8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/GetWatcherSettingsAction.java @@ -30,9 +30,13 @@ public GetWatcherSettingsAction() { public static class Request extends MasterNodeReadRequest { - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } - public Request(StreamInput in) throws 
IOException {} + public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } @Override public void writeTo(StreamOutput out) throws IOException {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java index 29f4db51e146..b6d999ebbf38 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/UpdateWatcherSettingsAction.java @@ -39,10 +39,12 @@ public static class Request extends AcknowledgedRequest { private final Map settings; public Request(Map settings) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.settings = settings; } public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.settings = in.readGenericMap(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceRequest.java index 93cc7a18594d..449179e4f18f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceRequest.java @@ -29,7 +29,9 @@ public WatcherServiceRequest(StreamInput in) throws IOException { command = Command.valueOf(in.readString().toUpperCase(Locale.ROOT)); } - public WatcherServiceRequest() {} + public WatcherServiceRequest() { + 
super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); + } /** * Starts the watcher service if not already started. diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java index 6ba7dc6ac24c..9d3c4d684e19 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.indices.mapping.put.TransportPutMappingAction; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -21,6 +20,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.indices.SystemIndexDescriptor; @@ -297,7 +297,7 @@ public void testAddDocMappingIfMissing() { {"_doc":{"properties":{"some-field":{"type":"long"}}}}""", client, clusterState, - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, ActionTestUtils.assertNoFailureListener(Assert::assertTrue), 1 ); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java index f9fdc0c8362e..f72ca14c37e1 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAliasTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.AdminClient; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ClusterAdminClient; @@ -371,7 +370,7 @@ private void createIndexAndAliasIfNecessary(ClusterState clusterState) { TestIndexNameExpressionResolver.newInstance(), TEST_INDEX_PREFIX, TEST_INDEX_ALIAS, - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, listener ); } diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java index 13ef19886328..3376073bded0 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationInfoAction.java @@ -331,6 +331,7 @@ public static class Request extends MasterNodeReadRequest implements In private String[] indices; public Request(String... 
indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.indices = indices; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java index 43601ab1b294..ac5c5761efe1 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java @@ -44,6 +44,7 @@ public Request(StreamInput in) throws IOException { } public Request(String collectionName) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.collectionName = collectionName; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java index f9eeb2cca6d2..d54c119e083e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java @@ -41,6 +41,7 @@ public static class Request extends MasterNodeReadRequest implements To public static ParseField NAMES_FIELD = new ParseField("names"); public Request(String[] names) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.names = Objects.requireNonNull(names, "Collection names cannot be null"); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java index 659c58d2bd1b..108cebae155b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java @@ -43,6 +43,7 @@ public Request(StreamInput in) throws IOException { } public Request(String name) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.name = name; } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMoveToStepAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMoveToStepAction.java index 6061b6db8972..87c93a919821 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMoveToStepAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportMoveToStepAction.java @@ -212,6 +212,7 @@ public static class Request extends AcknowledgedRequest implements ToXC private PartialStepKey nextStepKey; public Request(String index, Step.StepKey currentStepKey, PartialStepKey nextStepKey) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.index = index; this.currentStepKey = currentStepKey; this.nextStepKey = nextStepKey; @@ -224,7 +225,9 @@ public Request(StreamInput in) throws IOException { this.nextStepKey = new PartialStepKey(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public String getIndex() { return index; diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java index 5818ce6582be..95358adb832c 100644 --- 
a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportRetryAction.java @@ -118,6 +118,7 @@ public static class Request extends AcknowledgedRequest implements Indi private IndicesOptions indicesOptions = IndicesOptions.strictExpandOpen(); public Request(String... indices) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.indices = indices; } @@ -127,7 +128,9 @@ public Request(StreamInput in) throws IOException { this.indicesOptions = IndicesOptions.readIndicesOptions(in); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } @Override public Request indices(String... indices) { diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java index 57aba2bb80d6..f09d86708766 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotRetentionIT.java @@ -17,7 +17,6 @@ import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; @@ -71,7 +70,7 @@ public void addMlState() { client(), ClusterState.EMPTY_STATE, TestIndexNameExpressionResolver.newInstance(), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + 
TimeValue.THIRTY_SECONDS, future ); future.actionGet(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java index 2e16436736e8..2f8165e6a20b 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelSnapshotSearchIT.java @@ -15,8 +15,8 @@ import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -60,7 +60,7 @@ public void addMlState() { client(), ClusterState.EMPTY_STATE, TestIndexNameExpressionResolver.newInstance(), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, future ); future.actionGet(); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java index 6cb467af525c..bc8e4794d7da 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java @@ -10,7 +10,6 @@ import 
org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.OperationRouting; @@ -200,7 +199,7 @@ protected void updateModelSnapshotOnJob(ModelSnapshot modelSnapshot) { client(), ClusterState.EMPTY_STATE, TestIndexNameExpressionResolver.newInstance(), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, future ); future.get(); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java index ae128b507c79..675933808c60 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.AliasMetadata; @@ -1101,7 +1100,7 @@ private void indexQuantiles(Quantiles quantiles) { client(), ClusterState.EMPTY_STATE, TestIndexNameExpressionResolver.newInstance(), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, future ); future.actionGet(); diff --git 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/UnusedStatsRemoverIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/UnusedStatsRemoverIT.java index 4c8382047e79..ee96d154ab55 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/UnusedStatsRemoverIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/UnusedStatsRemoverIT.java @@ -9,9 +9,9 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xcontent.ToXContent; @@ -57,7 +57,7 @@ public void createComponents() { client(), clusterService().state(), TestIndexNameExpressionResolver.newInstance(client().threadPool().getThreadContext()), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + TimeValue.THIRTY_SECONDS, future ); future.actionGet(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java index c849e69c780b..a2d8fd1d6031 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlInitializationService.java @@ -146,7 +146,7 @@ public void clusterChanged(ClusterChangedEvent event) { AnnotationIndex.createAnnotationsIndexIfNecessary( client, event.state(), - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, ActionListener.wrap(r -> 
isIndexCreationInProgress.set(false), e -> { if (e.getMessage().equals(previousException)) { logger.debug("Error creating ML annotations index or aliases", e); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java index 9fc97ff234c5..4ee294bcf0d8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/TrainedModelStatsService.java @@ -256,14 +256,14 @@ private void createStatsIndexIfNecessary() { client, clusterState, indexNameExpressionResolver, - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, ActionListener.wrap( r -> ElasticsearchMappings.addDocMappingIfMissing( MlStatsIndex.writeAlias(), MlStatsIndex::wrappedMapping, client, clusterState, - MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, + MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, listener, MlStatsIndex.STATS_INDEX_MAPPINGS_VERSION ), diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java index 7a314b82024b..8d83156b0e0e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java @@ -100,7 +100,6 @@ import java.util.function.BiConsumer; import java.util.function.Consumer; -import static org.elasticsearch.action.support.master.MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_INDEX_HIDDEN; import static 
org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; @@ -268,7 +267,7 @@ public void testOpenJob() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); when(jobTask.getAllocationId()).thenReturn(1L); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); assertEquals(1, manager.numberOfOpenJobs()); assertTrue(manager.jobHasActiveAutodetectProcess(jobTask)); ArgumentCaptor captor = ArgumentCaptor.forClass(JobTaskState.class); @@ -296,7 +295,7 @@ public void testOpenJob_withoutVersion() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn(job.getId()); AtomicReference errorHolder = new AtomicReference<>(); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> errorHolder.set(e)); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> errorHolder.set(e)); Exception error = errorHolder.get(); assertThat(error, is(notNullValue())); assertThat(error.getMessage(), equalTo("Cannot open job [no_version] because jobs created prior to version 5.5 are not supported")); @@ -339,22 +338,22 @@ public void testOpenJob_exceedMaxNumJobs() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("bar"); when(jobTask.getAllocationId()).thenReturn(1L); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("baz"); 
when(jobTask.getAllocationId()).thenReturn(2L); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); assertEquals(3, manager.numberOfOpenJobs()); Exception[] holder = new Exception[1]; jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foobar"); when(jobTask.getAllocationId()).thenReturn(3L); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> holder[0] = e); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> holder[0] = e); Exception e = holder[0]; assertEquals("max running job capacity [3] reached", e.getMessage()); @@ -363,7 +362,7 @@ public void testOpenJob_exceedMaxNumJobs() { when(jobTask.getJobId()).thenReturn("baz"); manager.closeJob(jobTask, null); assertEquals(2, manager.numberOfOpenJobs()); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e1, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e1, b) -> {}); assertEquals(3, manager.numberOfOpenJobs()); } @@ -374,7 +373,7 @@ public void testProcessData() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); DataLoadParams params = new DataLoadParams(TimeRange.builder().build(), Optional.empty()); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -401,7 +400,7 @@ public void testProcessDataThrowsElasticsearchStatusException_onIoException() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); Exception[] holder = new Exception[1]; manager.processData(jobTask, analysisRegistry, 
inputStream, xContentType, params, (dataCounts1, e) -> holder[0] = e); assertNotNull(holder[0]); @@ -413,7 +412,7 @@ public void testCloseJob() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -443,7 +442,7 @@ public void testVacate() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); when(jobTask.triggerVacate()).thenReturn(true); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -475,7 +474,7 @@ public void testCanCloseClosingJob() throws Exception { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -528,7 +527,7 @@ public void testCanKillClosingJob() throws Exception { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -562,7 +561,7 @@ public void testBucketResetMessageIsSent() { InputStream inputStream = createInputStream(""); JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData(jobTask, analysisRegistry, 
inputStream, xContentType, params, (dataCounts1, e) -> {}); verify(autodetectCommunicator).writeToJob(same(inputStream), same(analysisRegistry), same(xContentType), same(params), any()); } @@ -573,7 +572,7 @@ public void testFlush() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); InputStream inputStream = createInputStream(""); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -617,7 +616,7 @@ public void testCloseThrows() { // create a jobtask JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -660,7 +659,7 @@ public void testJobHasActiveAutodetectProcess() { when(jobTask.getJobId()).thenReturn("foo"); assertFalse(manager.jobHasActiveAutodetectProcess(jobTask)); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -683,7 +682,7 @@ public void testKillKillsAutodetectProcess() throws IOException { when(jobTask.getJobId()).thenReturn("foo"); assertFalse(manager.jobHasActiveAutodetectProcess(jobTask)); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, @@ -728,7 +727,7 @@ public void testProcessData_GivenStateNotOpened() { JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + 
manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); InputStream inputStream = createInputStream(""); DataCounts[] dataCounts = new DataCounts[1]; manager.processData( @@ -836,7 +835,7 @@ public void testGetOpenProcessMemoryUsage() { AutodetectProcessManager manager = createSpyManager(); JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("foo"); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); long expectedSizeBytes = Job.PROCESS_MEMORY_OVERHEAD.getBytes() + switch (assignmentMemoryBasis) { case MODEL_MEMORY_LIMIT -> modelMemoryLimitBytes; @@ -905,7 +904,7 @@ private AutodetectProcessManager createSpyManagerAndCallProcessData(String jobId AutodetectProcessManager manager = createSpyManager(); JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn(jobId); - manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + manager.openJob(jobTask, clusterState, TimeValue.THIRTY_SECONDS, (e, b) -> {}); manager.processData( jobTask, analysisRegistry, diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java index 0d8f3aad27da..05ab989f444f 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetStatusAction.java @@ -133,7 +133,9 @@ public Request(StreamInput in) throws IOException { waitForResourcesCreated = in.readBoolean(); } - public Request() {} + public Request() { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); + } public boolean waitForResourcesCreated() { return waitForResourcesCreated; diff --git 
a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java index 4446e0aeae4d..14417c693f28 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/DeleteShutdownNodeAction.java @@ -33,10 +33,12 @@ public static class Request extends AcknowledgedRequest { private final String nodeId; public Request(String nodeId) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.nodeId = nodeId; } public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || in.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java index b82e6a08fb26..7266f8ff7112 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/GetShutdownStatusAction.java @@ -43,6 +43,7 @@ public static class Request extends MasterNodeRequest { private final String[] nodeIds; public Request(String... 
nodeIds) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT); this.nodeIds = nodeIds; } diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java index 8356285c10d0..d857ee4b322d 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/PutShutdownNodeAction.java @@ -90,6 +90,7 @@ public Request( @Nullable String targetNodeName, @Nullable TimeValue gracePeriod ) { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); this.nodeId = nodeId; this.type = type; this.reason = reason; @@ -100,6 +101,7 @@ public Request( @UpdateForV9 // TODO call super(in) instead of explicitly reading superclass contents once bwc no longer needed public Request(StreamInput in) throws IOException { + super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, DEFAULT_ACK_TIMEOUT); if (in.getTransportVersion().isPatchFrom(TransportVersions.V_8_13_4) || in.getTransportVersion().isPatchFrom(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX_8_14) || in.getTransportVersion().onOrAfter(TransportVersions.SHUTDOWN_REQUEST_TIMEOUTS_FIX)) { From a9c848a0eb43e8e7035c6b4aa9888e3716341d7d Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 13 May 2024 12:27:30 -0400 Subject: [PATCH 097/119] ESQL: Document the features API (#108573) This adds documentation to our use of the features API, warning users away from using it to control tests especially, now that we have the capabilities API. 
--- .../xpack/esql/plugin/EsqlFeatures.java | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 4f852264193b..cf311d441367 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -10,10 +10,23 @@ import org.elasticsearch.Version; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.rest.action.admin.cluster.RestNodesCapabilitiesAction; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import java.util.Map; import java.util.Set; +/** + * {@link NodeFeature}s declared by ESQL. These should be used for fast checks + * on the node. Before the introduction of the {@link RestNodesCapabilitiesAction} + * this was used for controlling which features are tested so many of the + * examples below are *just* used for that. Don't make more of those - add them + * to {@link EsqlCapabilities} instead. + *

+ * NOTE: You can't remove a feature now and probably never will be able to. + * Only add more of these if you need a fast CPU level check. + *

+ */ public class EsqlFeatures implements FeatureSpecification { /** * Introduction of {@code MV_SORT}, {@code MV_SLICE}, and {@code MV_ZIP}. From b704a4b3a63262cfc5b727059c7063796036f4ae Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 13 May 2024 12:37:51 -0400 Subject: [PATCH 098/119] ESQL: Use `required_capability` in CSV spec (#108570) This renames `required_feature` to `required_capability` because we're now using the newlt built `capabilities` API. --- .../src/main/resources/blog.csv-spec | 2 +- .../src/main/resources/boolean.csv-spec | 24 +- .../cartesian_multipolygons.csv-spec | 32 +-- .../src/main/resources/conditional.csv-spec | 10 +- .../src/main/resources/convert.csv-spec | 36 +-- .../src/main/resources/date.csv-spec | 30 +- .../src/main/resources/enrich.csv-spec | 66 ++--- .../src/main/resources/eval.csv-spec | 2 +- .../src/main/resources/floats.csv-spec | 28 +- .../src/main/resources/from.csv-spec | 2 +- .../src/main/resources/ints.csv-spec | 78 +++--- .../src/main/resources/ip.csv-spec | 50 ++-- .../src/main/resources/math.csv-spec | 26 +- .../src/main/resources/metadata.csv-spec | 30 +- .../src/main/resources/spatial.csv-spec | 258 +++++++++--------- .../main/resources/spatial_shapes.csv-spec | 74 ++--- .../src/main/resources/string.csv-spec | 54 ++-- .../src/main/resources/unsigned_long.csv-spec | 16 +- .../src/main/resources/version.csv-spec | 16 +- .../elasticsearch/xpack/ql/CsvSpecReader.java | 4 +- 20 files changed, 419 insertions(+), 419 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec index 64c4641b2ca0..3f6ef72d84bc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog.csv-spec @@ -1,7 +1,7 @@ # Examples that were published in a blog post 2023-08-08.full-blown-query -required_feature: esql.enrich_load +required_capability: 
enrich_load FROM employees | WHERE still_hired == true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec index 809f4e9ba2c7..c0572e7bbcd4 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec @@ -63,7 +63,7 @@ avg(salary):double | always_false:boolean in -required_feature: esql.mv_warn +required_capability: mv_warn from employees | keep emp_no, is_rehired, still_hired | where is_rehired in (still_hired, true) | where is_rehired != still_hired; ignoreOrder:true @@ -236,7 +236,7 @@ emp_no:integer |languages:integer |byte2bool:boolean |short2bool:boolean ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort row a = [true, false, true, false] | eval sa = mv_sort(a), sb = mv_sort(a, "DESC"); @@ -245,7 +245,7 @@ a:boolean | sa:boolean | sb:boolean ; mvSortEmp -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(is_rehired, "DESC"), sa = mv_sort(is_rehired) @@ -263,7 +263,7 @@ emp_no:integer | is_rehired:boolean | sa:boolean | sd:boolea ; mvSlice -required_feature: esql.mv_sort +required_capability: mv_sort row a = [true, false, false, true] | eval a1 = mv_slice(a, 1), a2 = mv_slice(a, 2, 3); @@ -273,7 +273,7 @@ a:boolean | a1:boolean | a2:boolean ; mvSliceEmp -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(is_rehired, 0) @@ -290,7 +290,7 @@ emp_no:integer | is_rehired:boolean | a1:boolean ; values -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -302,7 +302,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -323,7 +323,7 @@ still_hired:boolean | first_letter:keyword ; 
valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -347,7 +347,7 @@ still_hired:boolean | job_positions:keyword ; implicitCastingEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from employees | where still_hired == "true" | sort emp_no | keep emp_no | limit 1; emp_no:integer @@ -355,7 +355,7 @@ emp_no:integer ; implicitCastingNotEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from employees | where still_hired != "true" | sort emp_no | keep emp_no | limit 1; emp_no:integer @@ -363,7 +363,7 @@ emp_no:integer ; implicitCastingIn -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from employees | where still_hired in ("true", "false") | sort emp_no | keep emp_no | limit 1; emp_no:integer @@ -371,7 +371,7 @@ emp_no:integer ; implicitCastingInField -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from employees | where false in ("true", still_hired) | sort emp_no | keep emp_no | limit 1; emp_no:integer diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec index aa6529c2d431..508cccc20b86 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/cartesian_multipolygons.csv-spec @@ -6,7 +6,7 @@ # Test against a polygon similar in size to the Bottom Left polygon whereIntersectsSinglePolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM cartesian_multipolygons | WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 
0))")) @@ -25,7 +25,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereContainsSinglePolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0.001 0.001, 0.999 0.001, 0.999 0.999, 0.001 0.999, 0.001 0.001))")) @@ -38,7 +38,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereWithinSinglePolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) @@ -53,7 +53,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereDisjointSinglePolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM cartesian_multipolygons | WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))")) @@ -79,7 +79,7 @@ id:l | name:keyword | shape:cartesian_shape # Test against a polygon smaller in size to the Bottom Left polygon whereIntersectsSmallerPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM cartesian_multipolygons | WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) @@ -98,7 +98,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereContainsSmallerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) @@ -111,7 +111,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereWithinSmallerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) @@ -123,7 +123,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereDisjointSmallerPolygon -required_feature: 
esql.st_disjoint +required_capability: st_disjoint FROM cartesian_multipolygons | WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0.2 0.2, 0.8 0.2, 0.8 0.8, 0.2 0.8, 0.2 0.2))")) @@ -149,7 +149,7 @@ id:l | name:keyword | shape:cartesian_shape # Test against a polygon similar in size to the entire test data whereIntersectsLargerPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM cartesian_multipolygons | WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) @@ -180,7 +180,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereContainsLargerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) @@ -191,7 +191,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereWithinLargerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) @@ -222,7 +222,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereDisjointLargerPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM cartesian_multipolygons | WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((0 0, 3 0, 3 3, 0 3, 0 0))")) @@ -236,7 +236,7 @@ id:l | name:keyword | shape:cartesian_shape # Test against a polygon larger than all test data whereIntersectsEvenLargerPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM cartesian_multipolygons | WHERE ST_Intersects(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) @@ -267,7 +267,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereContainsEvenLargerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Contains(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 
-1, 4 4, -1 4, -1 -1))")) @@ -278,7 +278,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereWithinEvenLargerPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM cartesian_multipolygons | WHERE ST_Within(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) @@ -309,7 +309,7 @@ id:l | name:keyword | shape:cartesian_shape ; whereDisjointEvenLargerPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM cartesian_multipolygons | WHERE ST_Disjoint(shape, TO_CARTESIANSHAPE("POLYGON((-1 -1, 4 -1, 4 4, -1 4, -1 -1))")) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec index 64a8c1d9da31..d4b45ca37fc2 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec @@ -130,7 +130,7 @@ error_rate:double | hour:date nullOnMultivaluesMathOperation -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL sum = a + b| LIMIT 1 | WHERE sum IS NULL; warning:Line 1:37: evaluation of [a + b] failed, treating result as null. Only first 20 failures recorded. @@ -142,7 +142,7 @@ a:integer | b:integer | sum:integer notNullOnMultivaluesMathOperation -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL sum = a + b| LIMIT 1 | WHERE sum IS NOT NULL; warning:Line 1:37: evaluation of [a + b] failed, treating result as null. Only first 20 failures recorded. 
@@ -153,7 +153,7 @@ a:integer | b:integer | sum:integer nullOnMultivaluesComparisonOperation -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NULL; warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. @@ -166,7 +166,7 @@ a:integer | b:integer | same:boolean notNullOnMultivaluesComparisonOperation -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 1, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL; warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. @@ -177,7 +177,7 @@ a:integer | b:integer | same:boolean notNullOnMultivaluesComparisonOperationWithPartialMatch -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts ROW a = 5, b = [ 5, 2 ]| EVAL same = a == b| LIMIT 1 | WHERE same IS NOT NULL; warning:Line 1:38: evaluation of [a == b] failed, treating result as null. Only first 20 failures recorded. 
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec index 43e683e165e2..94dfd9f3267f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/convert.csv-spec @@ -1,7 +1,7 @@ // Conversion-specific tests convertToBoolean -required_feature: esql.casting_operator +required_capability: casting_operator ROW zero=0::boolean, one=1::bool ; @@ -10,7 +10,7 @@ false |true ; convertToInteger -required_feature: esql.casting_operator +required_capability: casting_operator ROW zero="0"::integer, one="1"::int ; @@ -19,7 +19,7 @@ ROW zero="0"::integer, one="1"::int ; convertToIP -required_feature: esql.casting_operator +required_capability: casting_operator ROW ip="1.1.1.1"::ip ; @@ -28,7 +28,7 @@ ROW ip="1.1.1.1"::ip ; convertToLong -required_feature: esql.casting_operator +required_capability: casting_operator ROW long="-1"::long ; @@ -37,7 +37,7 @@ long:long ; convertToLongWithWarning -required_feature: esql.casting_operator +required_capability: casting_operator ROW long="1.1.1.1"::long ; warning:Line 1:10: evaluation of [\"1.1.1.1\"::long] failed, treating result as null. Only first 20 failures recorded. 
@@ -48,7 +48,7 @@ null ; convertToDouble -required_feature: esql.casting_operator +required_capability: casting_operator ROW zero="0"::double ; @@ -57,7 +57,7 @@ ROW zero="0"::double ; convertToString -required_feature: esql.casting_operator +required_capability: casting_operator ROW one=1::keyword, two=2::text, three=3::string ; @@ -66,7 +66,7 @@ ROW one=1::keyword, two=2::text, three=3::string ; convertToDatetime -required_feature: esql.casting_operator +required_capability: casting_operator ROW date="1985-01-01T00:00:00Z"::datetime, zero=0::datetime ; @@ -75,7 +75,7 @@ ROW date="1985-01-01T00:00:00Z"::datetime, zero=0::datetime ; convertToVersion -required_feature: esql.casting_operator +required_capability: casting_operator ROW ver="1.2.3"::version ; @@ -84,7 +84,7 @@ ROW ver="1.2.3"::version ; convertToUnsignedLong -required_feature: esql.casting_operator +required_capability: casting_operator ROW zero="0"::unsigned_long, two=abs(-2)::UnsigneD_LOng ; @@ -93,7 +93,7 @@ ROW zero="0"::unsigned_long, two=abs(-2)::UnsigneD_LOng ; convertToGeoPoint -required_feature: esql.casting_operator +required_capability: casting_operator ROW gp="POINT(0 0)"::geo_point ; @@ -102,7 +102,7 @@ POINT (0.0 0.0) ; convertToGeoShape -required_feature: esql.casting_operator +required_capability: casting_operator ROW gs="POINT(0 0)"::geo_shape ; @@ -111,7 +111,7 @@ POINT (0.0 0.0) ; convertToCartesianPoint -required_feature: esql.casting_operator +required_capability: casting_operator ROW cp="POINT(0 0)"::cartesian_point ; @@ -120,7 +120,7 @@ POINT (0.0 0.0) ; convertToCartesianShape -required_feature: esql.casting_operator +required_capability: casting_operator ROW cs="POINT(0 0)"::cartesian_shape ; @@ -129,7 +129,7 @@ POINT (0.0 0.0) ; convertChained -required_feature: esql.casting_operator +required_capability: casting_operator ROW one=1::STRING::LONG::BOOL ; @@ -138,7 +138,7 @@ true ; convertWithIndexMultipleConversionsInSameExpressionAndConversionInFiltering -required_feature: 
esql.casting_operator +required_capability: casting_operator FROM employees | EVAL en_str=emp_no::STRING, bd=ABS(birth_date::LONG)::STRING | KEEP en_str, emp_no, bd, birth_date @@ -153,7 +153,7 @@ required_feature: esql.casting_operator ; convertWithBoolExpressionAndQualifiedName -required_feature: esql.casting_operator +required_capability: casting_operator FROM employees | EVAL neg = (NOT still_hired)::string, sf = ROUND(height.scaled_float::double, 2) | KEEP emp_no, still_hired, neg, sf @@ -169,7 +169,7 @@ required_feature: esql.casting_operator ; docsCastOperator -required_feature: esql.casting_operator +required_capability: casting_operator //tag::docsCastOperator[] ROW ver = CONCAT(("0"::INT + 1)::STRING, ".2.3")::VERSION //end::docsCastOperator[] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 8d54288de552..22e9231939d0 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -216,7 +216,7 @@ string:keyword |datetime:date ; convertFromUnsignedLong -required_feature:esql.convert_warn +required_capability: convert_warn row ul = [9223372036854775808, 520128000000] | eval dt = to_datetime(ul); warning:Line 1:58: evaluation of [to_datetime(ul)] failed, treating result as null. Only first 20 failures recorded. 
@@ -357,7 +357,7 @@ date1:date | date2:date | dd_ms:integer ; evalDateDiffString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW date1 = TO_DATETIME("2023-12-02T11:00:00.000Z") | EVAL dd_ms = DATE_DIFF("microseconds", date1, "2023-12-02T11:00:00.001Z") @@ -623,7 +623,7 @@ dt:datetime |plus_post:datetime |plus_pre:datetime datePlusQuarter # "quarter" introduced in 8.15 -required_feature: esql.timespan_abbreviations +required_capability: timespan_abbreviations row dt = to_dt("2100-01-01T01:01:01.000Z") | eval plusQuarter = dt + 2 quarters ; @@ -634,7 +634,7 @@ dt:datetime | plusQuarter:datetime datePlusAbbreviatedDurations # abbreviations introduced in 8.15 -required_feature: esql.timespan_abbreviations +required_capability: timespan_abbreviations row dt = to_dt("2100-01-01T00:00:00.000Z") | eval plusDurations = dt + 1 h + 2 min + 2 sec + 1 s + 4 ms ; @@ -645,7 +645,7 @@ dt:datetime | plusDurations:datetime datePlusAbbreviatedPeriods # abbreviations introduced in 8.15 -required_feature: esql.timespan_abbreviations +required_capability: timespan_abbreviations row dt = to_dt("2100-01-01T00:00:00.000Z") | eval plusDurations = dt + 0 yr + 1y + 2 q + 3 mo + 4 w + 3 d ; @@ -855,7 +855,7 @@ date:date | year:long ; dateExtractString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW date = DATE_PARSE("yyyy-MM-dd", "2022-05-06") | EVAL year = DATE_EXTRACT("year", "2022-05-06") @@ -896,7 +896,7 @@ Anneke |Preusig |1989-06-02T00:00:00.000Z|1989-06-02 ; evalDateFormatString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = 1 | EVAL df = DATE_FORMAT("YYYY-MM-dd", "1989-06-02T00:00:00.000Z") @@ -925,7 +925,7 @@ Anneke |Preusig |1989-06-02T00:00:00.000Z|1989-01-01T00:00:00.000 ; evalDateTruncString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = 1 | 
EVAL year_hired = DATE_TRUNC(1 year, "1991-06-26T00:00:00.000Z") @@ -990,7 +990,7 @@ FROM sample_data ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort row a = ["1985-01-01T00:00:00.000Z", "1986-01-01T00:00:00.000Z", "1987-01-01T00:00:00.000Z"] | eval datetime = TO_DATETIME(a) @@ -1019,7 +1019,7 @@ count:long | age:long ; values -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10003 @@ -1031,7 +1031,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -1052,7 +1052,7 @@ required_feature: esql.agg_values ; valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -1077,7 +1077,7 @@ required_feature: esql.agg_values ; implicitCastingNotEqual -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | where birth_date != "1957-05-23T00:00:00Z" | keep emp_no, birth_date | sort emp_no | limit 3; emp_no:integer | birth_date:datetime @@ -1087,7 +1087,7 @@ emp_no:integer | birth_date:datetime ; implicitCastingLessThanOrEqual -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | where birth_date <= "1957-05-20T00:00:00Z" | keep emp_no, birth_date | sort emp_no | limit 3; emp_no:integer | birth_date:datetime @@ -1097,7 +1097,7 @@ emp_no:integer | birth_date:datetime ; implicitCastingGreaterThan -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | where birth_date > "1957-05-24T00:00:00Z" | keep emp_no, birth_date | sort emp_no | limit 3; emp_no:integer | birth_date:datetime diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index 
f044989ec9cc..bd384886f0dd 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -32,7 +32,7 @@ median_duration:double | env:keyword ; simple -required_feature: esql.enrich_load +required_capability: enrich_load // tag::enrich[] ROW language_code = "1" @@ -47,7 +47,7 @@ language_code:keyword | language_name:keyword ; enrichOnSimple -required_feature: esql.enrich_load +required_capability: enrich_load // tag::enrich_on[] ROW a = "1" @@ -63,7 +63,7 @@ a:keyword | language_name:keyword enrichOn -required_feature: esql.enrich_load +required_capability: enrich_load from employees | sort emp_no | limit 1 | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name; @@ -73,7 +73,7 @@ emp_no:integer | language_name:keyword enrichOn2 -required_feature: esql.enrich_load +required_capability: enrich_load from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1 ; @@ -83,7 +83,7 @@ emp_no:integer | language_name:keyword simpleSortLimit -required_feature: esql.enrich_load +required_capability: enrich_load from employees | eval x = to_string(languages) | enrich languages_policy on x | keep emp_no, language_name | sort emp_no | limit 1; @@ -92,7 +92,7 @@ emp_no:integer | language_name:keyword ; with -required_feature: esql.enrich_load +required_capability: enrich_load from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 1 | enrich languages_policy on x with language_name; @@ -103,7 +103,7 @@ emp_no:integer | x:keyword | language_name:keyword withSimple -required_feature: esql.enrich_load +required_capability: enrich_load // tag::enrich_with[] ROW a = "1" @@ -119,7 +119,7 @@ a:keyword | language_name:keyword withAlias -required_feature: esql.enrich_load +required_capability: enrich_load from employees | sort emp_no | limit 3 | eval x = 
to_string(languages) | keep emp_no, x | enrich languages_policy on x with lang = language_name; @@ -131,7 +131,7 @@ emp_no:integer | x:keyword | lang:keyword ; withAliasSimple -required_feature: esql.enrich_load +required_capability: enrich_load // tag::enrich_rename[] ROW a = "1" @@ -147,7 +147,7 @@ a:keyword | name:keyword withAliasSort -required_feature: esql.enrich_load +required_capability: enrich_load from employees | eval x = to_string(languages) | keep emp_no, x | sort emp_no | limit 3 | enrich languages_policy on x with lang = language_name; @@ -160,7 +160,7 @@ emp_no:integer | x:keyword | lang:keyword withAliasOverwriteName#[skip:-8.13.0] -required_feature: esql.enrich_load +required_capability: enrich_load from employees | sort emp_no | eval x = to_string(languages) | enrich languages_policy on x with emp_no = language_name @@ -172,7 +172,7 @@ French ; withAliasAndPlain -required_feature: esql.enrich_load +required_capability: enrich_load from employees | sort emp_no desc | limit 3 | eval x = to_string(languages) | keep emp_no, x | enrich languages_policy on x with lang = language_name, language_name; @@ -185,7 +185,7 @@ emp_no:integer | x:keyword | lang:keyword | language_name:keyword withTwoAliasesSameProp -required_feature: esql.enrich_load +required_capability: enrich_load from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x | enrich languages_policy on x with lang = language_name, lang2 = language_name; @@ -196,7 +196,7 @@ emp_no:integer | x:keyword | lang:keyword | lang2:keyword redundantWith -required_feature: esql.enrich_load +required_capability: enrich_load from employees | sort emp_no | limit 1 | eval x = to_string(languages) | keep emp_no, x | enrich languages_policy on x with language_name, language_name; @@ -207,7 +207,7 @@ emp_no:integer | x:keyword | language_name:keyword nullInput -required_feature: esql.enrich_load +required_capability: enrich_load from employees | where emp_no == 10017 | keep emp_no, 
gender | enrich languages_policy on gender with language_name, language_name; @@ -218,7 +218,7 @@ emp_no:integer | gender:keyword | language_name:keyword constantNullInput -required_feature: esql.enrich_load +required_capability: enrich_load from employees | where emp_no == 10020 | eval x = to_string(languages) | keep emp_no, x | enrich languages_policy on x with language_name, language_name; @@ -229,7 +229,7 @@ emp_no:integer | x:keyword | language_name:keyword multipleEnrich -required_feature: esql.enrich_load +required_capability: enrich_load row a = "1", b = "2", c = "10" | enrich languages_policy on a with a_lang = language_name @@ -242,7 +242,7 @@ a:keyword | b:keyword | c:keyword | a_lang:keyword | b_lang:keyword | c_lang:key enrichEval -required_feature: esql.enrich_load +required_capability: enrich_load from employees | eval x = to_string(languages) | enrich languages_policy on x with lang = language_name @@ -258,8 +258,8 @@ emp_no:integer | x:keyword | lang:keyword | language:keyword multivalue -required_feature: esql.enrich_load -required_feature: esql.mv_sort +required_capability: enrich_load +required_capability: mv_sort row a = ["1", "2"] | enrich languages_policy on a with a_lang = language_name | eval a_lang = mv_sort(a_lang); @@ -269,7 +269,7 @@ a:keyword | a_lang:keyword enrichCidr#[skip:-8.13.99, reason:enrich for cidr added in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM sample_data | ENRICH client_cidr_policy ON client_ip WITH env @@ -290,7 +290,7 @@ client_ip:ip | count_env:i | max_env:keyword enrichCidr2#[skip:-8.99.99, reason:ip_range support not added yet] -required_feature: esql.enrich_load +required_capability: enrich_load FROM sample_data | ENRICH client_cidr_policy ON client_ip WITH env, client_cidr @@ -310,7 +310,7 @@ client_ip:ip | env:keyword | client_cidr:ip_range enrichAgesStatsYear#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load +required_capability: 
enrich_load FROM employees | WHERE birth_date > "1960-01-01" @@ -333,7 +333,7 @@ birth_year:long | age_group:keyword | count:long enrichAgesStatsAgeGroup#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM employees | WHERE birth_date IS NOT NULL @@ -350,7 +350,7 @@ count:long | age_group:keyword enrichHeightsStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM employees | ENRICH heights_policy ON height WITH height_group = description @@ -369,7 +369,7 @@ Very Tall | 2.0 | 2.1 | 20 enrichDecadesStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM employees | ENRICH decades_policy ON birth_date WITH birth_decade = decade, birth_description = description @@ -390,7 +390,7 @@ null | 1980 | null | Radical Eighties | 4 spatialEnrichmentKeywordMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM airports | WHERE abbrev == "CPH" @@ -405,7 +405,7 @@ CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark spatialEnrichmentGeoMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM airports | WHERE abbrev == "CPH" @@ -420,8 +420,8 @@ CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load -required_feature: esql.mv_warn +required_capability: enrich_load +required_capability: mv_warn FROM airports | ENRICH city_boundaries ON city_location WITH airport, region, city_boundary @@ -437,7 +437,7 @@ POINT(1.396561 24.127649) | 872 | 88 | 1044 spatialEnrichmentKeywordMatchAndSpatialPredicate#[skip:-8.13.99, reason:st_intersects added in 8.14] -required_feature: esql.enrich_load +required_capability: 
enrich_load FROM airports | ENRICH city_names ON city WITH airport, region, city_boundary @@ -455,7 +455,7 @@ count:long | airport_in_city:boolean spatialEnrichmentKeywordMatchAndSpatialAggregation#[skip:-8.13.99, reason:st_intersects added in 8.14] -required_feature: esql.enrich_load +required_capability: enrich_load FROM airports | ENRICH city_names ON city WITH airport, region, city_boundary @@ -473,7 +473,7 @@ count:long | centroid:geo_point | airport_in_city:boolean spatialEnrichmentTextMatch#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] -required_feature: esql.enrich_load +required_capability: enrich_load FROM airports | WHERE abbrev == "IDR" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index 85b665d71744..571d7835451c 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -201,7 +201,7 @@ Kyoichi. |Kyoichi.Maliniak |Kyoichi.MaliniakKyoichi. |Kyoichi ; roundArrays -required_feature: esql.disable_nullable_opts +required_capability: disable_nullable_opts row a = [1.2], b = [2.4, 7.9] | eval c = round(a), d = round(b), e = round([1.2]), f = round([1.2, 4.6]), g = round([1.14], 1), h = round([1.14], [1, 2]); warning:Line 1:56: evaluation of [round(b)] failed, treating result as null. Only first 20 failures recorded. 
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 8af770c52124..1f2bcb6b5120 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -92,7 +92,7 @@ int:integer |dbl:double ; lessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change < 1 | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [salary_change < 1] failed, treating result as null. Only first 20 failures recorded. @@ -108,7 +108,7 @@ emp_no:integer |salary_change:double ; greaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change > 1 | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [salary_change > 1] failed, treating result as null. Only first 20 failures recorded. @@ -124,7 +124,7 @@ emp_no:integer |salary_change:double ; equalToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change == 1.19 | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change == 1.19] failed, treating result as null. Only first 20 failures recorded. @@ -136,7 +136,7 @@ emp_no:integer |salary_change:double ; equalToOrEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change == 1.19 or salary_change == 7.58 | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change] failed, treating result as null. Only first 20 failures recorded. 
@@ -149,7 +149,7 @@ emp_no:integer |salary_change:double ; inMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change in (1.19, 7.58) | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change in (1.19, 7.58)] failed, treating result as null. Only first 20 failures recorded. @@ -162,7 +162,7 @@ emp_no:integer |salary_change:double ; notLessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change < 1) | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change < 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change < 1] failed, treating result as null. Only first 20 failures recorded.] @@ -178,7 +178,7 @@ emp_no:integer |salary_change:double ; notGreaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change > 1) | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change > 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change > 1] failed, treating result as null. Only first 20 failures recorded.] @@ -194,7 +194,7 @@ emp_no:integer |salary_change:double ; notEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change == 1.19) | keep emp_no, salary_change | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change == 1.19)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change == 1.19] failed, treating result as null. Only first 20 failures recorded.] 
@@ -241,7 +241,7 @@ row a = [1.1, 2.1, 2.1] | eval da = mv_dedupe(a); ; mvSliceEmp -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change, 0, 1) @@ -436,7 +436,7 @@ ROW deg = [90.0, 180.0, 270.0] ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort row a = [4.0, 2.0, -3.0, 2.0] | eval sa = mv_sort(a), sd = mv_sort(a, "DESC"); @@ -445,7 +445,7 @@ a:double | sa:double | sd:double ; mvSortEmp -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(salary_change, "DESC"), sa = mv_sort(salary_change) @@ -467,7 +467,7 @@ emp_no:integer | salary_change:double | sa:double | sd:double ; values -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -479,7 +479,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -500,7 +500,7 @@ required_feature: esql.agg_values ; valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec index c2c0b82f1a66..00a8c0da8f14 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/from.csv-spec @@ -130,7 +130,7 @@ c:l | name:k ; convertFromDatetimeWithOptions -required_feature: esql.from_options +required_capability: from_options // tag::convertFromDatetimeWithOptions[] FROM employees OPTIONS "allow_no_indices"="false","preference"="_local" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 69ae951e4290..e247d6c3a04e 100644 --- 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -1,7 +1,7 @@ // Integral types-specific tests inLongAndInt -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where avg_worked_seconds in (372957040, salary_change.long, 236703986) | where emp_no in (10017, emp_no - 1) | keep emp_no, avg_worked_seconds; warning:Line 1:24: evaluation of [avg_worked_seconds in (372957040, salary_change.long, 236703986)] failed, treating result as null. Only first 20 failures recorded. @@ -68,7 +68,7 @@ long:long |ul:ul ; convertDoubleToUL -required_feature:esql.convert_warn +required_capability: convert_warn row d = 123.4 | eval ul = to_ul(d), overflow = to_ul(1e20); warningRegex:Line 1:48: evaluation of \[to_ul\(1e20\)\] failed, treating result as null. Only first 20 failures recorded. @@ -127,7 +127,7 @@ int:integer |long:long ; convertULToLong -required_feature:esql.convert_warn +required_capability: convert_warn row ul = [9223372036854775807, 9223372036854775808] | eval long = to_long(ul); warningRegex:Line 1:67: evaluation of \[to_long\(ul\)\] failed, treating result as null. Only first 20 failures recorded. @@ -170,7 +170,7 @@ str1:keyword |str2:keyword |str3:keyword |long1:long |long2:long |long3:long ; convertDoubleToLong -required_feature:esql.convert_warn +required_capability: convert_warn row d = 123.4 | eval d2l = to_long(d), overflow = to_long(1e19); warningRegex:Line 1:51: evaluation of \[to_long\(1e19\)\] failed, treating result as null. Only first 20 failures recorded. 
@@ -190,7 +190,7 @@ int:integer |ii:integer ; convertLongToInt -required_feature:esql.convert_warn +required_capability: convert_warn // tag::to_int-long[] ROW long = [5013792, 2147483647, 501379200000] @@ -207,7 +207,7 @@ long:long |int:integer ; convertULToInt -required_feature:esql.convert_warn +required_capability: convert_warn row ul = [2147483647, 9223372036854775808] | eval int = to_int(ul); warningRegex:Line 1:57: evaluation of \[to_int\(ul\)\] failed, treating result as null. Only first 20 failures recorded. @@ -239,7 +239,7 @@ int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer ; convertStringToIntFail#[skip:-8.13.99, reason:warning changed in 8.14] -required_feature: esql.mv_warn +required_capability: mv_warn row str1 = "2147483647.2", str2 = "2147483648", non = "no number" | eval i1 = to_integer(str1), i2 = to_integer(str2), noi = to_integer(non); warning:Line 1:79: evaluation of [to_integer(str1)] failed, treating result as null. Only first 20 failures recorded. @@ -254,7 +254,7 @@ str1:keyword |str2:keyword |non:keyword |i1:integer |i2:integer | ; convertDoubleToInt -required_feature:esql.convert_warn +required_capability: convert_warn row d = 123.4 | eval d2i = to_integer(d), overflow = to_integer(1e19); warningRegex:Line 1:54: evaluation of \[to_integer\(1e19\)\] failed, treating result as null. Only first 20 failures recorded. @@ -265,7 +265,7 @@ d:double |d2i:integer |overflow:integer ; lessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int < 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [salary_change.int < 1] failed, treating result as null. Only first 20 failures recorded. 
@@ -281,7 +281,7 @@ emp_no:integer |salary_change.int:integer ; greaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int > 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [salary_change.int > 1] failed, treating result as null. Only first 20 failures recorded. @@ -297,7 +297,7 @@ emp_no:integer |salary_change.int:integer ; equalToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int == 0 | keep emp_no, salary_change.int | sort emp_no; warning:Line 1:24: evaluation of [salary_change.int == 0] failed, treating result as null. Only first 20 failures recorded. @@ -312,7 +312,7 @@ emp_no:integer |salary_change.int:integer ; equalToOrEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int == 1 or salary_change.int == 8 | keep emp_no, salary_change.int | sort emp_no; warning:Line 1:24: evaluation of [salary_change.int] failed, treating result as null. Only first 20 failures recorded. @@ -325,7 +325,7 @@ emp_no:integer |salary_change.int:integer ; inMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where salary_change.int in (1, 7) | keep emp_no, salary_change.int | sort emp_no; warning:Line 1:24: evaluation of [salary_change.int in (1, 7)] failed, treating result as null. Only first 20 failures recorded. @@ -338,7 +338,7 @@ emp_no:integer |salary_change.int:integer ; notLessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change.int < 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change.int < 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change.int < 1] failed, treating result as null. 
Only first 20 failures recorded.] @@ -354,7 +354,7 @@ emp_no:integer |salary_change.int:integer ; notGreaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change.int > 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change.int > 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change.int > 1] failed, treating result as null. Only first 20 failures recorded.] @@ -370,7 +370,7 @@ emp_no:integer |salary_change.int:integer ; notEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(salary_change.int == 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warning:Line 1:24: evaluation of [not(salary_change.int == 1)] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [salary_change.int == 1] failed, treating result as null. Only first 20 failures recorded.] 
@@ -417,7 +417,7 @@ row a = [1, 2, 2, 3] | eval da = mv_dedupe(a); ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort // tag::mv_sort[] ROW a = [4, 2, -3, 2] @@ -432,7 +432,7 @@ a:integer | sa:integer | sd:integer ; mvSortEmpInt -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(salary_change.int, "DESC"), sa = mv_sort(salary_change.int) @@ -454,7 +454,7 @@ emp_no:integer | salary_change.int:integer | sa:integer | sd:integer ; mvSortEmpLong -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(salary_change.long, "DESC"), sa = mv_sort(salary_change.long) @@ -476,7 +476,7 @@ emp_no:integer | salary_change.long:long | sa:long | sd:long ; mvSlice -required_feature: esql.mv_sort +required_capability: mv_sort // tag::mv_slice_positive[] row a = [1, 2, 2, 3] @@ -490,7 +490,7 @@ a:integer | a1:integer | a2:integer ; mvSliceNegativeOffset -required_feature: esql.mv_sort +required_capability: mv_sort // tag::mv_slice_negative[] row a = [1, 2, 2, 3] @@ -504,7 +504,7 @@ a:integer | a1:integer | a2:integer ; mvSliceSingle -required_feature: esql.mv_sort +required_capability: mv_sort row a = 1 | eval a1 = mv_slice(a, 0); @@ -514,7 +514,7 @@ a:integer | a1:integer ; mvSliceOutOfBound -required_feature: esql.mv_sort +required_capability: mv_sort row a = [1, 2, 2, 3] | eval a1 = mv_slice(a, 4), a2 = mv_slice(a, 2, 6), a3 = mv_slice(a, 4, 6); @@ -524,7 +524,7 @@ a:integer | a1:integer | a2:integer | a3:integer ; mvSliceEmpInt -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, 0, 1) @@ -541,7 +541,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntSingle -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, 1) @@ -558,7 +558,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntEndOutOfBound 
-required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, 1, 4) @@ -575,7 +575,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntOutOfBound -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, 2, 4) @@ -592,7 +592,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntStartOutOfBoundNegative -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, -5, -2) @@ -609,7 +609,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpIntOutOfBoundNegative -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.int, -5, -3) @@ -626,7 +626,7 @@ emp_no:integer | salary_change.int:integer | a1:integer ; mvSliceEmpLong -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.long, 0, 1) @@ -750,7 +750,7 @@ x:long ; valuesLong -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -762,7 +762,7 @@ required_feature: esql.agg_values ; valuesLongGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -783,7 +783,7 @@ required_feature: esql.agg_values ; valuesLongGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -807,7 +807,7 @@ required_feature: esql.agg_values ; valuesInt -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -819,7 +819,7 @@ required_feature: esql.agg_values ; valuesIntGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -840,7 +840,7 @@ l:integer | first_letter:keyword ; valuesIntGroupedByOrdinals 
-required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -864,7 +864,7 @@ required_feature: esql.agg_values ; valuesShort -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -876,7 +876,7 @@ required_feature: esql.agg_values ; valuesShortGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -897,7 +897,7 @@ l:integer | first_letter:keyword ; valuesShortGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index 8d3c0c9186c6..ae683acbb2c3 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -16,7 +16,7 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; equals -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | sort host, card | where ip0 == ip1 | keep card, host, ip0, ip1; warning:Line 1:38: evaluation of [ip0 == ip1] failed, treating result as null. Only first 20 failures recorded. @@ -60,7 +60,7 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; lessThan -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | sort host, card, ip1 | where ip0 < ip1 | keep card, host, ip0, ip1; warning:Line 1:43: evaluation of [ip0 < ip1] failed, treating result as null. Only first 20 failures recorded. @@ -73,7 +73,7 @@ lo0 |gamma |fe80::cae2:65ff:fece:feb9|fe81::cae2:65ff:fece:f ; notEquals -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | sort host, card, ip1 | where ip0 != ip1 | keep card, host, ip0, ip1; warning:Line 1:43: evaluation of [ip0 != ip1] failed, treating result as null. 
Only first 20 failures recorded. @@ -125,7 +125,7 @@ null |[127.0.0.1, 127.0.0.2, 127.0.0.3] ; conditional -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | keep eq, ip0, ip1; ignoreOrder:true @@ -146,7 +146,7 @@ fe80::cae2:65ff:fece:fec1 |[fe80::cae2:65ff:fece:feb ; in -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true @@ -168,7 +168,7 @@ eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece inWithWarningsRegex#[skip:-8.13.99, reason:regex warnings in tests introduced in v 8.14.0] -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true @@ -188,7 +188,7 @@ eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece ; cidrMatchSimple -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where cidr_match(ip1, "127.0.0.2/32") | keep card, host, ip0, ip1; warning:Line 1:20: evaluation of [cidr_match(ip1, \"127.0.0.2/32\")] failed, treating result as null. Only first 20 failures recorded. 
@@ -199,7 +199,7 @@ eth1 |beta |127.0.0.1 |127.0.0.2 ; cidrMatchNullField -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where cidr_match(ip0, "127.0.0.2/32") is null | keep card, host, ip0, ip1; ignoreOrder:true @@ -213,7 +213,7 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; cdirMatchMultipleArgs -required_feature: esql.mv_warn +required_capability: mv_warn //tag::cdirMatchMultipleArgs[] FROM hosts @@ -233,7 +233,7 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; cidrMatchFunctionArg -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where cidr_match(ip1, concat("127.0.0.2", "/32"), "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true @@ -246,7 +246,7 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; cidrMatchFieldArg -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | eval cidr="127.0.0.2" | where cidr_match(ip1, cidr, "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true @@ -294,7 +294,7 @@ eth0 |beta |127.0.0.1 |::1 ; pushDownIPWithIn -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where ip1 in (to_ip("::1"), to_ip("127.0.0.1")) | keep card, host, ip0, ip1; ignoreOrder:true @@ -308,7 +308,7 @@ eth0 |beta |127.0.0.1 |::1 ; pushDownIPWithComparision -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where ip1 > to_ip("127.0.0.1") | keep card, ip1; ignoreOrder:true @@ -324,7 +324,7 @@ eth0 |fe80::cae2:65ff:fece:fec1 ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort FROM hosts | eval sd = mv_sort(ip1, "DESC"), sa = mv_sort(ip1) @@ -342,7 +342,7 @@ epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::c ; mvSlice -required_feature: esql.mv_sort +required_capability: mv_sort from hosts | where host == "epsilon" @@ -358,7 +358,7 @@ epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::c ; mvSlice -required_feature: esql.mv_sort 
+required_capability: mv_sort from hosts | where host == "epsilon" @@ -374,7 +374,7 @@ epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] | [fe81::c ; mvZip -required_feature: esql.mv_sort +required_capability: mv_sort from hosts | eval zip = mv_zip(to_string(description), to_string(ip0), "@@") @@ -392,7 +392,7 @@ epsilon | null | null ; values -required_feature: esql.agg_values +required_capability: agg_values FROM hosts | STATS ip0=MV_SORT(VALUES(ip0)) @@ -403,7 +403,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM hosts | EVAL host=SUBSTRING(host, 0, 1) @@ -419,7 +419,7 @@ fe80::cae2:65ff:fece:feb9 | g ; valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM hosts | STATS ip0=MV_SORT(VALUES(ip0)) BY host @@ -434,7 +434,7 @@ fe80::cae2:65ff:fece:feb9 | gamma ; implictCastingEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from hosts | where mv_first(ip0) == "127.0.0.1" | keep host, ip0 | sort host; host:keyword | ip0:ip @@ -445,7 +445,7 @@ beta | 127.0.0.1 ; implictCastingNotEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from hosts | where mv_first(ip0) != "127.0.0.1" | keep host, ip0 | sort host, ip0 | limit 3; host:keyword | ip0:ip @@ -455,7 +455,7 @@ epsilon | [fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece:fec0] ; implictCastingGreaterThan -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from hosts | where mv_first(ip0) > "127.0.0.1" | keep host, ip0 | sort host, ip0 | limit 3; host:keyword | ip0:ip @@ -465,7 +465,7 @@ gamma | fe80::cae2:65ff:fece:feb9 ; implictCastingLessThanOrEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended 
from hosts | where mv_first(ip0) <= "127.0.0.1" | keep host, ip0 | sort host, ip0 | limit 3; host:keyword | ip0:ip @@ -475,7 +475,7 @@ beta | 127.0.0.1 ; implictCastingIn -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from hosts | where mv_first(ip0) in ( "127.0.0.1", "::1") | keep host, ip0 | sort host, ip0; host:keyword | ip0:ip diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index e0604acbcce1..4e080bac0ed2 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -201,7 +201,7 @@ height:double | s:double ; powSalarySquared -required_feature: esql.pow_double +required_capability: pow_double from employees | eval s = pow(to_long(salary) - 75000, 2) + 10000 | keep salary, s | sort salary desc | limit 4; @@ -618,7 +618,7 @@ base:double | exponent:integer | result:double ; powIntInt -required_feature: esql.pow_double +required_capability: pow_double ROW base = 2, exponent = 2 | EVAL s = POW(base, exponent) @@ -629,7 +629,7 @@ base:integer | exponent:integer | s:double ; powIntIntPlusInt -required_feature: esql.pow_double +required_capability: pow_double row s = 1 + pow(2, 2); @@ -645,7 +645,7 @@ s:double ; powIntUL -required_feature: esql.pow_double +required_capability: pow_double row x = pow(1, 9223372036854775808); @@ -654,7 +654,7 @@ x:double ; powLongUL -required_feature: esql.pow_double +required_capability: pow_double row x = to_long(1) | eval x = pow(x, 9223372036854775808); @@ -663,7 +663,7 @@ x:double ; powUnsignedLongUL -required_feature: esql.pow_double +required_capability: pow_double row x = to_ul(1) | eval x = pow(x, 9223372036854775808); @@ -688,7 +688,7 @@ null ; powULInt -required_feature: esql.pow_double +required_capability: pow_double row x = 
pow(to_unsigned_long(9223372036854775807), 1); @@ -697,7 +697,7 @@ x:double ; powULIntOverrun -required_feature: esql.pow_double +required_capability: pow_double ROW x = POW(9223372036854775808, 2) ; @@ -719,7 +719,7 @@ x:double ; powULLong -required_feature: esql.pow_double +required_capability: pow_double row x = to_long(10) | eval x = pow(to_unsigned_long(10), x); @@ -728,7 +728,7 @@ x:double ; powULLongOverrun -required_feature: esql.pow_double +required_capability: pow_double row x = to_long(100) | eval x = pow(to_unsigned_long(10), x); @@ -1414,7 +1414,7 @@ Anneke |Preusig |1.56 |1.56 ; evalAbsString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW number = -1.0 | EVAL abs_number = ABS("10.0") @@ -1425,7 +1425,7 @@ number:double | abs_number:double ; functionUnderArithmeticOperationAggString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = 1 | eval x = date_trunc(1 month, "2024-11-22") + 2 days, y = x + 3 days @@ -1437,7 +1437,7 @@ count():long | y:date ; functionUnderArithmeticOperationString -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | eval x = date_trunc(1 month, "2024-11-22") + 2 days, y = x + 3 days diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec index bcb971804808..b4cd18f72885 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec @@ -1,5 +1,5 @@ simpleKeep -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | limit 2 | keep emp_no, _index, _version; emp_no:integer |_index:keyword |_version:long @@ -8,7 +8,7 @@ emp_no:integer |_index:keyword |_version:long ; 
aliasWithSameName -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | limit 2 | eval _index = _index, _version = _version | keep emp_no, _index, _version; emp_no:integer |_index:keyword |_version:long @@ -17,7 +17,7 @@ emp_no:integer |_index:keyword |_version:long ; inComparison -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | where _index == "employees" | where _version == 1 | keep emp_no | limit 2; emp_no:integer @@ -26,7 +26,7 @@ emp_no:integer ; metaIndexInAggs -required_feature: esql.metadata_fields +required_capability: metadata_fields // tag::metaIndexInAggs[] FROM employees METADATA _index, _id | STATS max = MAX(emp_no) BY _index @@ -40,7 +40,7 @@ max:integer |_index:keyword ; metaIndexAliasedInAggs -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index | eval _i = _index | stats max = max(emp_no) by _i; @@ -49,7 +49,7 @@ max:integer |_i:keyword ; metaVersionInAggs -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _version | stats min = min(emp_no) by _version; min:integer |_version:long @@ -57,7 +57,7 @@ min:integer |_version:long ; metaVersionAliasedInAggs -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _version | eval _v = _version | stats min = min(emp_no) by _v; min:integer |_v:long @@ -65,7 +65,7 @@ min:integer |_v:long ; inAggsAndAsGroups -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | stats max = max(_version) by _index; max:long |_index:keyword @@ -73,7 +73,7 @@ max:long |_index:keyword ; inAggsAndAsGroupsAliased -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | eval _i = _index, _v = 
_version | stats max = max(_v) by _i; max:long |_i:keyword @@ -81,7 +81,7 @@ max:long |_i:keyword ; inFunction -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | where length(_index) == length("employees") | where abs(_version) == 1 | keep emp_no | limit 2; emp_no:integer @@ -90,7 +90,7 @@ emp_no:integer ; inArithmetics -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | eval i = _version + 2 | stats min = min(emp_no) by i; min:integer |i:long @@ -98,7 +98,7 @@ min:integer |i:long ; inSort -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort _version, _index, emp_no | keep emp_no, _version, _index | limit 2; emp_no:integer |_version:long |_index:keyword @@ -107,7 +107,7 @@ emp_no:integer |_version:long |_index:keyword ; withMvFunction -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _version | eval i = mv_avg(_version) + 2 | stats min = min(emp_no) by i; min:integer |i:double @@ -115,7 +115,7 @@ min:integer |i:double ; overwritten -required_feature: esql.metadata_fields +required_capability: metadata_fields from employees metadata _index, _version | sort emp_no | eval _index = 3, _version = "version" | keep emp_no, _index, _version | limit 3; emp_no:integer |_index:integer |_version:keyword @@ -125,7 +125,7 @@ emp_no:integer |_index:integer |_version:keyword ; multipleIndices -required_feature: esql.metadata_fields +required_capability: metadata_fields // tag::multipleIndices[] FROM ul_logs, apps METADATA _index, _version | WHERE id IN (13, 14) AND _version == 1 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 26fcca423d28..6d6b3b0782a9 100644 --- 
a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -3,7 +3,7 @@ ############################################### convertFromStringQuantize -required_feature: esql.spatial_points +required_capability: spatial_points row wkt = "POINT(42.97109629958868 14.7552534006536)" | eval pt = to_geopoint(wkt); @@ -13,7 +13,7 @@ POINT(42.97109629958868 14.7552534006536) |POINT(42.97109629958868 14.7552534006 ; convertFromString -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_geopoint-str[] ROW wkt = "POINT(42.97109630194 14.7552534413725)" @@ -28,7 +28,7 @@ wkt:keyword |pt:geo_point ; convertFromStringArray -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source row wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] | eval pt = to_geopoint(wkt); @@ -38,7 +38,7 @@ wkt:keyword ; centroidFromStringNested -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg row wkt = "POINT(42.97109629958868 14.7552534006536)" | STATS c = ST_CENTROID_AGG(TO_GEOPOINT(wkt)); @@ -48,7 +48,7 @@ POINT(42.97109629958868 14.7552534006536) ; centroidFromString1 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)"] | MV_EXPAND wkt @@ -60,7 +60,7 @@ POINT(42.97109629958868 14.7552534006536) ; centroidFromString2 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)"] | MV_EXPAND wkt @@ -72,7 +72,7 @@ POINT(59.390193899162114 18.741501288022846) ; centroidFromString3 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 
22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] | MV_EXPAND wkt @@ -84,7 +84,7 @@ POINT(39.58327988510707 20.619513023697994) ; centroidFromString4 -required_feature: esql.st_x_y +required_capability: st_x_y ROW wkt = ["POINT(42.97109629958868 14.7552534006536)", "POINT(75.80929149873555 22.72774917539209)", "POINT(-0.030548143003023033 24.37553649504829)"] | MV_EXPAND wkt @@ -97,7 +97,7 @@ POINT(39.58327988510707 20.619513023697994) | 39.58327988510707 | 20.61951302369 ; stXFromString -required_feature: esql.st_x_y +required_capability: st_x_y // tag::st_x_y[] ROW point = TO_GEOPOINT("POINT(42.97109629958868 14.7552534006536)") @@ -112,7 +112,7 @@ POINT(42.97109629958868 14.7552534006536) | 42.97109629958868 | 14.755253400653 ; simpleLoad -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source FROM airports | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12; @@ -132,7 +132,7 @@ ZAH | Zāhedān | POINT(60.8628 29.4964) | Iran ; stXFromAirportsSupportsNull -required_feature: esql.st_x_y +required_capability: st_x_y FROM airports | EVAL x = FLOOR(ABS(ST_X(city_location))/200), y = FLOOR(ABS(ST_Y(city_location))/100) @@ -149,7 +149,7 @@ c:long | x:double | y:double # Tests for ST_CENTROID on GEO_POINT type centroidFromAirports -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg // tag::st_centroid_agg-airports[] FROM airports @@ -164,7 +164,7 @@ POINT(-0.030548143003023033 24.37553649504829) ; centroidFromAirportsNested -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(TO_GEOPOINT(location)) @@ -175,7 +175,7 @@ POINT (-0.03054810272375508 24.37553651570554) ; centroidFromAirportsCount -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() @@ -186,7 +186,7 @@ POINT(-0.030548143003023033 
24.37553649504829) | 891 ; centroidFromAirportsCountGrouped -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank @@ -205,7 +205,7 @@ POINT(1.2588642098541771 24.379140841774642) | 63 | 2 ; centroidFromAirportsFiltered -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE scalerank == 9 @@ -217,7 +217,7 @@ POINT(83.27726172452623 28.99289782286029) | 33 ; centroidFromAirportsCountGroupedCentroid -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank @@ -229,7 +229,7 @@ POINT (7.572387259169772 26.836561792945492) | 891 ; centroidFromAirportsCountCityLocations -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() @@ -240,7 +240,7 @@ POINT (1.3965610809060276 24.127649406297987) | 891 ; centroidFromAirportsCountGroupedCountry -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() BY country @@ -269,7 +269,7 @@ POINT (70.7946499697864 30.69746997440234) | 10 | Pakistan ; centroidFromAirportsFilteredCountry -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE country == "United States" @@ -281,7 +281,7 @@ POINT (-97.3333946136801 38.07953176370194) | 129 ; centroidFromAirportsCountGroupedCountryCentroid -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS centroid=ST_CENTROID_AGG(city_location), count=COUNT() BY country @@ -293,7 +293,7 @@ POINT (17.55538044598613 18.185558743854063) | 891 ; centroidFromAirportsCountryCount -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | STATS 
airports=ST_CENTROID_AGG(location), cities=ST_CENTROID_AGG(city_location), count=COUNT() @@ -304,7 +304,7 @@ POINT(-0.030548143003023033 24.37553649504829) | POINT (1.3965610809060276 24.12 ; centroidFromAirportsFilteredAndSorted -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE scalerank == 9 @@ -318,7 +318,7 @@ POINT(78.73736493755132 26.761841227998957) | 12 ; centroidFromAirportsAfterMvExpand -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | MV_EXPAND type @@ -330,7 +330,7 @@ POINT(2.121611400672094 24.559172889205755) | 933 ; centroidFromAirportsGroupedAfterMvExpand -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | MV_EXPAND type @@ -350,7 +350,7 @@ POINT(1.2588642098541771 24.379140841774642) | 63 | 2 ; centroidFromAirportsGroupedAfterMvExpandFiltered -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE scalerank == 9 @@ -363,7 +363,7 @@ POINT(83.16847535921261 28.79002037679311) | 40 | 9 ; centroidFromAirportsAfterMvExpandFiltered -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE scalerank == 9 @@ -376,7 +376,7 @@ POINT(83.16847535921261 28.79002037679311) | 40 ; centroidFromAirportsAfterKeywordPredicateCountryUK -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports | WHERE country == "United Kingdom" @@ -388,7 +388,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 ; centroidFromAirportsAfterIntersectsPredicateCountryUK -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 
60.8449))")) @@ -400,7 +400,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 ; centroidFromAirportsAfterContainsPredicateCountryUK -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports | WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))"), location) @@ -412,7 +412,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 ; centroidFromAirportsAfterWithinPredicateCountryUK -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports | WHERE ST_WITHIN(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) @@ -424,7 +424,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 ; intersectsAfterCentroidFromAirportsAfterKeywordPredicateCountryUK -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | WHERE country == "United Kingdom" @@ -443,7 +443,7 @@ POINT (-2.597342072712148 54.33551226578214) | 17 | true ; centroidFromAirportsAfterIntersectsEvalExpression -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | EVAL in_uk = ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((1.2305 60.8449, -1.582 61.6899, -10.7227 58.4017, -7.1191 55.3291, -7.9102 54.2139, -5.4492 54.0078, -5.2734 52.3756, -7.8223 49.6676, -5.0977 49.2678, 0.9668 50.5134, 2.5488 52.1065, 2.6367 54.0078, -0.9668 56.4625, 1.2305 60.8449))")) @@ -461,7 +461,7 @@ POINT (0.04453958108176276 23.74658354606057) | 873 | false ; centroidFromAirportsAfterIntersectsPredicate -required_feature: esql.st_intersects +required_capability: 
st_intersects FROM airports | WHERE ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) @@ -473,7 +473,7 @@ POINT (42.97109629958868 14.7552534006536) | 1 ; centroidFromAirportsAfterIntersectsCompoundPredicate -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | WHERE scalerank == 9 AND ST_INTERSECTS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) AND country == "Yemen" @@ -488,7 +488,7 @@ POINT (42.97109629958868 14.7552534006536) | 1 # Tests for ST_INTERSECTS on GEO_POINT type pointIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects // tag::st_intersects-airports[] FROM airports @@ -503,7 +503,7 @@ HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen ; pointIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | WHERE ST_INTERSECTS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))"), location) @@ -514,7 +514,7 @@ HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen ; literalPointIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -528,7 +528,7 @@ wkt:keyword | pt:geo_point ; literalPointIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -542,7 +542,7 @@ wkt:keyword | pt:geo_point ; literalPointIntersectsLiteralPolygonOneRow -required_feature: esql.st_intersects +required_capability: st_intersects ROW intersects = ST_INTERSECTS(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -552,7 +552,7 @@ true ; cityInCityBoundary -required_feature: esql.st_intersects +required_capability: st_intersects FROM airport_city_boundaries | EVAL in_city = 
ST_INTERSECTS(city_location, city_boundary) @@ -568,7 +568,7 @@ cardinality:k | in_city:boolean ; cityNotInCityBoundaryBiggest -required_feature: esql.st_intersects +required_capability: st_intersects FROM airport_city_boundaries | WHERE NOT ST_INTERSECTS(city_location, city_boundary) @@ -583,7 +583,7 @@ SYX | Sanya Phoenix Int'l | Sanya | POINT(109.5036 18.253 ; airportCityLocationPointIntersection -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_mp | WHERE ST_INTERSECTS(location, city_location) @@ -594,7 +594,7 @@ XXX | Atlantis | POINT(0 0) | Atlantis ; airportCityLocationPointIntersectionCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_mp | WHERE ST_INTERSECTS(location, city_location) @@ -609,7 +609,7 @@ POINT (0 0) | POINT (0 0) | 1 # Tests for ST_DISJOINT on GEO_POINT type literalPolygonDisjointLiteralPoint -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -623,7 +623,7 @@ wkt:keyword | pt:geo_point ; literalPointDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -637,7 +637,7 @@ wkt:keyword | pt:geo_point ; literalPolygonDisjointLiteralPointOneRow -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW disjoint = ST_DISJOINT(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) ; @@ -647,7 +647,7 @@ false ; literalPointDisjointLiteralPolygonOneRow -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW disjoint = ST_DISJOINT(TO_GEOPOINT("POINT(-1 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -657,7 +657,7 @@ true ; pointDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports | WHERE ST_DISJOINT(location, 
TO_GEOSHAPE("POLYGON((-10 -60, 120 -60, 120 60, -10 60, -10 -60))")) @@ -679,7 +679,7 @@ x:double | y:double | count:long ; airportCityLocationPointDisjointCentroid -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_mp | WHERE ST_DISJOINT(location, city_location) @@ -694,7 +694,7 @@ POINT (67.8581917192787 24.02956652920693) | POINT (67.81638333333332 24.0489999 # Tests for ST_CONTAINS on GEO_POINT type literalPolygonContainsLiteralPoint -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -708,7 +708,7 @@ wkt:keyword | pt:geo_point ; literalPointDoesNotContainLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -720,7 +720,7 @@ wkt:keyword | pt:geo_point ; literalPolygonContainsLiteralPointOneRow -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW contains = ST_CONTAINS(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) ; @@ -730,7 +730,7 @@ true ; literalPointDoesNotContainLiteralPolygonOneRow -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW contains = ST_CONTAINS(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -740,7 +740,7 @@ false ; pointContainsLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports | WHERE ST_CONTAINS(location, TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 42 14))")) @@ -750,7 +750,7 @@ abbrev:keyword | city:keyword | city_location:geo_point | country:keyword ; pointContainedInLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports | WHERE ST_CONTAINS(TO_GEOSHAPE("POLYGON((42 14, 43 14, 43 15, 42 15, 
42 14))"), location) @@ -761,7 +761,7 @@ HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen ; airportCityLocationPointContains -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_mp | WHERE ST_CONTAINS(location, city_location) @@ -772,7 +772,7 @@ XXX | Atlantis | POINT(0 0) | Atlantis ; airportCityLocationPointContainsCentroid -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_mp | WHERE ST_CONTAINS(location, city_location) @@ -787,7 +787,7 @@ POINT (0 0) | POINT (0 0) | 1 # Tests for ST_WITHIN on GEO_POINT type literalPolygonNotWithinLiteralPoint -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -799,7 +799,7 @@ wkt:keyword | pt:geo_point ; literalPointWithinLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -813,7 +813,7 @@ wkt:keyword | pt:geo_point ; literalPolygonNotWithinLiteralPointOneRow -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW within = ST_WITHIN(TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))"), TO_GEOPOINT("POINT(0 0)")) ; @@ -823,7 +823,7 @@ false ; literalPointWithinLiteralPolygonOneRow -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW within = ST_WITHIN(TO_GEOPOINT("POINT(0 0)"), TO_GEOSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -833,7 +833,7 @@ true ; pointWithinLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within // tag::st_within-airports[] FROM airports @@ -848,7 +848,7 @@ HOD | Al Ḩudaydah | POINT(42.9511 14.8022) | Yemen ; airportCityLocationPointWithin -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_mp | 
WHERE ST_WITHIN(location, city_location) @@ -859,7 +859,7 @@ XXX | Atlantis | POINT(0 0) | Atlantis ; airportCityLocationPointWithinCentroid -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_mp | WHERE ST_WITHIN(location, city_location) @@ -874,7 +874,7 @@ POINT (0 0) | POINT (0 0) | 1 # Tests for Equality and casting with GEO_POINT geoPointEquals -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_geopoint-equals[] ROW wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] @@ -891,7 +891,7 @@ wkt:keyword |pt:geo_point ; geoPointNotEquals -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_geopoint-not-equals[] ROW wkt = ["POINT(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)"] @@ -908,7 +908,7 @@ wkt:keyword |pt:geo_point ; convertFromStringParseError -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_geopoint-str-parse-error[] row wkt = ["POINTX(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)", "POINT(111)"] @@ -936,7 +936,7 @@ wkt:keyword |pt:geo_point ############################################### convertCartesianFromString -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_cartesianpoint-str[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] @@ -953,7 +953,7 @@ wkt:keyword |pt:cartesian_point ; convertCartesianFromStringArray -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source row wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] | eval pt = to_cartesianpoint(wkt); @@ -963,7 +963,7 @@ wkt:keyword |pt:cartesian_point ; centroidCartesianFromStringNested -required_feature: esql.st_centroid_agg +required_capability: 
st_centroid_agg row wkt = "POINT(4297.10986328125 -1475.530029296875)" | STATS c = ST_CENTROID_AGG(TO_CARTESIANPOINT(wkt)); @@ -973,7 +973,7 @@ POINT(4297.10986328125 -1475.530029296875) ; centroidFromCartesianString1 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)"] | MV_EXPAND wkt @@ -985,7 +985,7 @@ POINT(4297.10986328125 -1475.530029296875) ; centroidFromCartesianString2 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)"] | MV_EXPAND wkt @@ -997,7 +997,7 @@ POINT(5939.02001953125 398.6199951171875) ; centroidFromCartesianString3 -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg ROW wkt = ["POINT(4297.10986328125 -1475.530029296875)", "POINT(7580.93017578125 2272.77001953125)", "POINT(-30.548143003023033 2437.553649504829)"] | MV_EXPAND wkt @@ -1009,7 +1009,7 @@ POINT(3949.163965353159 1078.2645465797348) ; stXFromCartesianString -required_feature: esql.st_x_y +required_capability: st_x_y ROW point = TO_CARTESIANPOINT("POINT(4297.10986328125 -1475.530029296875)") | EVAL x = ST_X(point), y = ST_Y(point) @@ -1020,7 +1020,7 @@ POINT(4297.10986328125 -1475.530029296875) | 4297.10986328125 | -1475.530029296 ; simpleCartesianLoad -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source FROM airports_web | WHERE scalerank == 9 | SORT abbrev | WHERE length(name) > 12; @@ -1039,7 +1039,7 @@ ZAH | POINT (6779435.866395892 3436280.545331025) | Zahedan Int'l # Tests for ST_CENTROID on CARTESIAN_POINT type cartesianCentroidFromAirports -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(location); @@ -1049,7 +1049,7 @@ POINT(-266681.67563861894 3053301.5120195406) ; cartesianCentroidFromAirportsNested -required_feature: 
esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(TO_CARTESIANPOINT(location)); @@ -1059,7 +1059,7 @@ POINT (-266681.66530554957 3053301.506061676) ; cartesianCentroidFromAirportsCount -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() @@ -1070,7 +1070,7 @@ POINT(-266681.67563861894 3053301.5120195406) | 849 ; cartesianCentroidFromAirportsCountGrouped -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank @@ -1089,7 +1089,7 @@ POINT(140136.12878224207 3081220.7881944445) | 63 | 2 ; cartesianCentroidFromAirportsFiltered -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | WHERE scalerank == 9 @@ -1101,7 +1101,7 @@ POINT(9289013.153846154 3615537.0533353365) | 26 ; cartesianCentroidFromAirportsFilteredAndSorted -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | WHERE scalerank == 9 @@ -1115,7 +1115,7 @@ POINT(9003597.4375 3429344.0078125) | 8 ; cartesianCentroidFromAirportsCountGroupedCentroid -required_feature: esql.st_centroid_agg +required_capability: st_centroid_agg FROM airports_web | STATS centroid=ST_CENTROID_AGG(location), count=COUNT() BY scalerank @@ -1130,7 +1130,7 @@ POINT (726480.0130685265 3359566.331716279) | 849 # Tests for ST_INTERSECTS on CARTESIAN_POINT type cartesianCentroidFromAirportsAfterIntersectsPredicate -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1142,7 +1142,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointIntersectsPolygon -required_feature: esql.st_intersects 
+required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1153,7 +1153,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; literalCartesianPointIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1167,7 +1167,7 @@ wkt:keyword | pt:cartesian_point ; cartesianPointIntersectsPointShape -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1178,7 +1178,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointIntersectsPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1189,7 +1189,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointIntersectsMultiPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("MULTIPOINT(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1202,7 +1202,7 @@ CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | ; cartesianPointIntersectsLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("LINESTRING(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1215,7 +1215,7 @@ CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | ; cartesianPointIntersectsMultiLineString -required_feature: esql.st_intersects +required_capability: 
st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("MULTILINESTRING((4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096),(1408119.2975413958 7484813.53657096, 1996039.722208033 8322469.9470024165))")) @@ -1229,7 +1229,7 @@ ARN | POINT(1996039.722208033 8322469.9470024165) | Arlanda | ; cartesianPointIntersectsPointShapeWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1241,7 +1241,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointIntersectsPointWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1253,7 +1253,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointIntersectsLiteralPolygonCount -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) @@ -1268,7 +1268,7 @@ count:long # Tests for ST_DISJOINT on CARTESIAN_POINT type literalPolygonDisjointLiteralCartesianPoint -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1282,7 +1282,7 @@ wkt:keyword | pt:cartesian_point ; literalCartesianPointDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1296,7 +1296,7 @@ wkt:keyword | pt:cartesian_point ; literalPolygonDisjointLiteralCartesianPointOneRow -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW disjoint = ST_DISJOINT(TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 
-1))"), TO_CARTESIANPOINT("POINT(0 0)")) ; @@ -1306,7 +1306,7 @@ false ; literalCartesianPointDisjointLiteralPolygonOneRow -required_feature: esql.st_disjoint +required_capability: st_disjoint ROW disjoint = ST_DISJOINT(TO_CARTESIANPOINT("POINT(-1 0)"), TO_CARTESIANSHAPE("POLYGON((0 -1, 1 -1, 1 1, 0 1, 0 -1))")) ; @@ -1316,7 +1316,7 @@ true ; cartesianPointDisjointLiteralPolygonCount -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) @@ -1328,7 +1328,7 @@ count:long ; cartesianPointIntersectsDisjointLiteralPolygonCount -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | EVAL intersects = ST_INTERSECTS(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) @@ -1344,7 +1344,7 @@ false | true | 405 ; cartesianPointDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("POLYGON((0 -60000000, 120000000 -60000000, 120000000 60000000, 0 60000000, 0 -60000000))")) @@ -1365,7 +1365,7 @@ x:double | y:double | count:long ; cartesianPointDisjointEmptyGeometry -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("LINESTRING()")) @@ -1380,7 +1380,7 @@ count:long ; cartesianPointDisjointInvalidGeometry -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM airports_web | WHERE ST_DISJOINT(location, TO_CARTESIANSHAPE("Invalid Geometry")) @@ -1398,7 +1398,7 @@ count:long # Tests for ST_CONTAINS on CARTESIAN_POINT type cartesianCentroidFromAirportsAfterPolygonContainsPointPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE 
ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"), location) @@ -1410,7 +1410,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPolygonContainsPointPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_CONTAINS(TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))"), location) @@ -1421,7 +1421,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; literalCartesianPolygonContainsPointPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1435,7 +1435,7 @@ wkt:keyword | pt:cartesian_point ; cartesianCentroidFromAirportsAfterPointContainsPolygonPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1447,7 +1447,7 @@ POINT (NaN NaN) | 0 ; cartesianPointContainsPolygonPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1457,7 +1457,7 @@ abbrev:keyword | location:cartesian_point | name:text | ; literalCartesianPointContainsPolygonPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1469,7 +1469,7 @@ wkt:keyword | pt:cartesian_point ; cartesianPointContainsPointShape -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE 
ST_CONTAINS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1480,7 +1480,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointContainsPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1491,7 +1491,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointContainsMultiPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("MULTIPOINT(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1502,7 +1502,7 @@ abbrev:keyword | location:cartesian_point | name:text | ; cartesianPointContainsLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("LINESTRING(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1513,7 +1513,7 @@ abbrev:keyword | location:cartesian_point | name:text | ; cartesianPointContainsMultiLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("MULTILINESTRING((4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096),(1408119.2975413958 7484813.53657096, 1996039.722208033 8322469.9470024165))")) @@ -1524,7 +1524,7 @@ abbrev:keyword | location:cartesian_point | name:text | ; cartesianPointContainsPointShapeWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1536,7 +1536,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointContainsPointWithCentroid -required_feature: esql.st_intersects +required_capability: 
st_intersects FROM airports_web | WHERE ST_CONTAINS(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1551,7 +1551,7 @@ POINT (4783520.5 1661010.0) | 1 # Tests for ST_WITHIN on CARTESIAN_POINT type cartesianCentroidFromAirportsAfterWithinPredicate -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1563,7 +1563,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointWithinPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POLYGON((4700000 1600000, 4800000 1600000, 4800000 1700000, 4700000 1700000, 4700000 1600000))")) @@ -1574,7 +1574,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; literalCartesianPointWithinPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -1588,7 +1588,7 @@ wkt:keyword | pt:cartesian_point ; cartesianPointWithinPointShape -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1599,7 +1599,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointWithinPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1610,7 +1610,7 @@ HOD | POINT (4783520.559160681 1661010.0197476079) | Hodeidah Int'l | ; cartesianPointWithinMultiPoint -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, 
TO_CARTESIANSHAPE("MULTIPOINT(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1623,7 +1623,7 @@ CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | ; cartesianPointWithinLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("LINESTRING(4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096)")) @@ -1636,7 +1636,7 @@ CPH | POINT (1408119.2975413958 7484813.53657096) | Copenhagen | ; cartesianPointWithinMultiLineString -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("MULTILINESTRING((4783520.559160681 1661010.0197476079, 1408119.2975413958 7484813.53657096),(1408119.2975413958 7484813.53657096, 1996039.722208033 8322469.9470024165))")) @@ -1650,7 +1650,7 @@ ARN | POINT(1996039.722208033 8322469.9470024165) | Arlanda | ; cartesianPointWithinPointShapeWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANSHAPE("POINT(4783520.559160681 1661010.0197476079)")) @@ -1662,7 +1662,7 @@ POINT (4783520.5 1661010.0) | 1 ; cartesianPointWithinPointWithCentroid -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | WHERE ST_WITHIN(location, TO_CARTESIANPOINT("POINT(4783520.559160681 1661010.0197476079)")) @@ -1677,7 +1677,7 @@ POINT (4783520.5 1661010.0) | 1 # Tests for Equality and casting with GEO_POINT cartesianPointEquals -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_cartesianpoint-equals[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] @@ -1694,7 +1694,7 @@ wkt:keyword |pt:cartesian_point ; cartesianPointNotEquals -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // 
tag::to_cartesianpoint-not-equals[] ROW wkt = ["POINT(4297.11 -1475.53)", "POINT(7580.93 2272.77)"] @@ -1711,7 +1711,7 @@ wkt:keyword |pt:cartesian_point ; convertCartesianFromStringParseError -required_feature: esql.spatial_points_from_source +required_capability: spatial_points_from_source // tag::to_cartesianpoint-str-parse-error[] row wkt = ["POINTX(4297.11 -1475.53)", "POINT(7580.93 2272.77)", "POINT(111)"] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec index 6d0d15c39898..dd092130c340 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial_shapes.csv-spec @@ -3,7 +3,7 @@ # convertFromString -required_feature: esql.spatial_shapes +required_capability: spatial_shapes // tag::to_geoshape-str[] ROW wkt = "POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" @@ -18,7 +18,7 @@ wkt:keyword | geom:geo_shape ; convertFromStringArray -required_feature: esql.spatial_shapes +required_capability: spatial_shapes row wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | eval pt = to_geoshape(wkt); @@ -28,7 +28,7 @@ wkt:keyword ; convertFromStringViaPoint -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = "POINT (30 10)" | EVAL point = TO_GEOPOINT(wkt) @@ -41,7 +41,7 @@ wkt:keyword | point:geo_point | shape:geo_shape # need to work out how to upload WKT simpleLoad -required_feature: esql.spatial_shapes +required_capability: spatial_shapes FROM countries_bbox | WHERE id == "ISL"; @@ -50,7 +50,7 @@ ISL|Iceland|BBOX(-24.538400, -13.499446, 66.536100, 63.390000) ; simpleLoadPointsAsShapes -required_feature: esql.spatial_shapes +required_capability: spatial_shapes FROM airports | WHERE abbrev == "CPH" OR abbrev == "VLC" @@ -80,7 +80,7 @@ CPH | Københavns Kommune | POINT(12.5683 
55.6761) | Copenhagen # Tests for ST_INTERSECTS with GEO_SHAPE pointIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | EVAL location = TO_GEOSHAPE(location) @@ -93,7 +93,7 @@ HOD | Hodeidah Int'l | POINT(42.97109630194 14.7552534413725) | Yemen ; polygonIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM airport_city_boundaries | WHERE ST_INTERSECTS(city_boundary, TO_GEOSHAPE("POLYGON((109.4 18.1, 109.6 18.1, 109.6 18.3, 109.4 18.3, 109.4 18.1))")) @@ -106,7 +106,7 @@ SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(1 ; pointIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports | EVAL location = TO_GEOSHAPE(location) @@ -119,7 +119,7 @@ HOD | Hodeidah Int'l | POINT(42.97109630194 14.7552534413725) | Yemen ; literalPointIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -133,7 +133,7 @@ wkt:keyword | pt:geo_point ; literalPointIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -147,7 +147,7 @@ wkt:keyword | pt:geo_point ; literalPointAsShapeIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -161,7 +161,7 @@ wkt:keyword | pt:geo_shape ; literalPointAsShapeIntersectsLiteralPolygonReversed -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -175,7 +175,7 @@ wkt:keyword | pt:geo_shape ; shapeIntersectsLiteralPolygon -required_feature: esql.st_intersects 
+required_capability: st_intersects FROM countries_bbox | WHERE ST_INTERSECTS(shape, TO_GEOSHAPE("POLYGON((29 -30, 31 -30, 31 -27.3, 29 -27.3, 29 -30))")) @@ -189,7 +189,7 @@ LSO | Lesotho | BBOX(27.013973, 29.455554, -28.570691, -30.650527) ; literalPolygonIntersectsLiteralPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))", "POLYGON((20 60, 6 60, 6 66, 20 66, 20 60))"] | EVAL other = TO_GEOSHAPE("POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64))") @@ -204,7 +204,7 @@ wkt:keyword | shape:geo_shape ; literalPolygonIntersectsLiteralPolygonOneRow -required_feature: esql.st_intersects +required_capability: st_intersects ROW intersects = ST_INTERSECTS(TO_GEOSHAPE("POLYGON((-20 60, -6 60, -6 66, -20 66, -20 60))"), TO_GEOSHAPE("POLYGON((-15 64, -10 64, -10 66, -15 66, -15 64))")) ; @@ -217,7 +217,7 @@ true # Tests for ST_DISJOINT with GEO_SHAPE polygonDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint // tag::st_disjoint-airport_city_boundaries[] FROM airport_city_boundaries @@ -238,7 +238,7 @@ ACA | General Juan N Alvarez Int'l | Acapulco de Juárez | Acapulco d # Tests for ST_CONTAINS and ST_WITHIN with GEO_SHAPE polygonContainsLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within // tag::st_contains-airport_city_boundaries[] FROM airport_city_boundaries @@ -255,7 +255,7 @@ SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(1 ; polygonWithinLiteralPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within // tag::st_within-airport_city_boundaries[] FROM airport_city_boundaries @@ -275,7 +275,7 @@ SYX | Sanya Phoenix Int'l | 天涯区 | Sanya | POINT(1 # Tests for Equality and casting with GEO_SHAPE geo_shapeEquals -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 
22.727749187571)"] | MV_EXPAND wkt @@ -288,7 +288,7 @@ wkt:keyword |pt:geo_shape ; geo_shapeNotEquals -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = ["POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))", "POINT(75.8092915005895 22.727749187571)"] | MV_EXPAND wkt @@ -301,7 +301,7 @@ wkt:keyword |pt:geo_shape ; convertFromStringParseError -required_feature: esql.spatial_shapes +required_capability: spatial_shapes row wkt = ["POINTX(42.97109630194 14.7552534413725)", "POINT(75.8092915005895 22.727749187571)", "POINT(111)"] | mv_expand wkt @@ -323,7 +323,7 @@ wkt:keyword |pt:geo_shape # convertCartesianShapeFromString -required_feature: esql.spatial_shapes +required_capability: spatial_shapes // tag::to_cartesianshape-str[] ROW wkt = ["POINT(4297.11 -1475.53)", "POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))"] @@ -340,7 +340,7 @@ wkt:keyword |geom:cartesian_shape ; convertCartesianFromStringArray -required_feature: esql.spatial_shapes +required_capability: spatial_shapes row wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | eval pt = to_cartesianshape(wkt); @@ -350,7 +350,7 @@ wkt:keyword ; convertCartesianFromStringViaPoint -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = "POINT (3010 -1010)" | EVAL point = TO_CARTESIANPOINT(wkt) @@ -363,7 +363,7 @@ wkt:keyword | point:cartesian_point | shape:cartesian_shape # need to work out how to upload WKT simpleCartesianShapeLoad -required_feature: esql.spatial_shapes +required_capability: spatial_shapes FROM countries_bbox_web | WHERE id == "ISL"; @@ -372,7 +372,7 @@ ISL|Iceland|BBOX(-2731602.192501422, -1502751.454502109, 1.0025136653899286E7, 9 ; simpleLoadCartesianPointsAsShapes -required_feature: esql.spatial_shapes +required_capability: spatial_shapes FROM 
airports_web | WHERE abbrev == "CPH" OR abbrev == "VLC" @@ -389,7 +389,7 @@ abbrev:keyword | name:text | scalerank:integer | type:keyword | location:cart # Tests for ST_INTERSECTS with CARTESIAN_SHAPE cartesianPointIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM airports_web | EVAL location = TO_CARTESIANSHAPE(location) @@ -402,7 +402,7 @@ HOD | Hodeidah Int'l | POINT (4783520.559160681 1661010.0197476079) | ; literalCartesianPointIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POINT(1 1)", "POINT(-1 -1)", "POINT(-1 1)", "POINT(1 -1)"] | MV_EXPAND wkt @@ -416,7 +416,7 @@ wkt:keyword | pt:cartesian_shape ; cartesianShapeIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects FROM countries_bbox_web | WHERE ST_INTERSECTS(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) @@ -430,7 +430,7 @@ LSO | Lesotho | BBOX(3007181.718244638, 3278977.271857335, -3321117. 
; literalCartesianPolygonIntersectsPolygon -required_feature: esql.st_intersects +required_capability: st_intersects ROW wkt = ["POLYGON((-2000 6000, -600 6000, -600 6600, -2000 6600, -2000 6000))", "POLYGON((2000 6000, 600 6000, 600 6600, 2000 6600, 2000 6000))"] | MV_EXPAND wkt @@ -447,7 +447,7 @@ wkt:keyword | shape:ca # Tests for ST_DISJOINT with CARTESIAN_SHAPE cartesianPolygonDisjointLiteralPolygon -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM countries_bbox_web | WHERE ST_DISJOINT(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) @@ -460,7 +460,7 @@ ZWE | Zimbabwe | BBOX (2809472.180051312, 3681512.6693309383, -176035 ; cartesianPolygonDisjointEmptyGeometry -required_feature: esql.st_disjoint +required_capability: st_disjoint FROM countries_bbox_web | WHERE ST_DISJOINT(shape, TO_CARTESIANSHAPE("LINESTRING()")) @@ -478,7 +478,7 @@ count:long # Tests for ST_CONTAINS and ST_WITHIN with CARTESIAN_SHAPE cartesianShapeContainsPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM countries_bbox_web | WHERE ST_CONTAINS(shape, TO_CARTESIANSHAPE("POLYGON((3100000 -3400000, 3500000 -3400000, 3500000 -3150000, 3100000 -3150000, 3100000 -3400000))")) @@ -490,7 +490,7 @@ ZAF | South Africa | BBOX(1834915.5679635953, 4218142.412200545, -2527908 ; cartesianShapeWithinPolygon -required_feature: esql.st_contains_within +required_capability: st_contains_within FROM countries_bbox_web | WHERE ST_WITHIN(shape, TO_CARTESIANSHAPE("POLYGON((1800000 -2500000, 4300000 -2500000, 4300000 -6000000, 1800000 -6000000, 1800000 -2500000))")) @@ -507,7 +507,7 @@ LSO | Lesotho | BBOX(3007181.718244638, 3278977.271857335, -3321117. 
# Tests for Equality and casting with CARTESIAN_SHAPE cartesianshapeEquals -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt @@ -520,7 +520,7 @@ wkt:keyword |pt:cartesian_shape ; cartesianShapeNotEquals -required_feature: esql.spatial_shapes +required_capability: spatial_shapes ROW wkt = ["POLYGON ((3339584.72 1118889.97, 4452779.63 4865942.27, 2226389.81 4865942.27, 1113194.90 2273030.92, 3339584.72 1118889.97))", "POINT(7580.93 2272.77)"] | MV_EXPAND wkt @@ -533,7 +533,7 @@ wkt:keyword |pt:cartesian_shape ; convertCartesianShapeFromStringParseError -required_feature: esql.spatial_shapes +required_capability: spatial_shapes row wkt = ["POINTX(4297.11 -1475.53)", "POINT(7580.93 2272.77)", "POINT(111)"] | mv_expand wkt diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 5bdf0bd963fe..632274631823 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -71,7 +71,7 @@ emp_no:integer | last_name:keyword | gender:keyword | f_l:boolean ; stringCast -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = 1 | eval ss = substring("abcd", "2"), l = left("abcd", "2"), r = right("abcd", "2"); @@ -80,7 +80,7 @@ a:integer | ss:keyword | l:keyword | r:keyword ; stringCastEmp -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting from employees | eval ss = substring(first_name, "2") @@ -330,7 +330,7 @@ emp_no:integer | name:keyword // Note: no matches in MV returned in -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions 
in ("Internship", first_name) | keep emp_no, job_positions; ignoreOrder:true @@ -522,7 +522,7 @@ emp_no:integer |positions:keyword ; lessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions < "C" | keep emp_no, job_positions | sort emp_no; warning:Line 1:24: evaluation of [job_positions < \"C\"] failed, treating result as null. Only first 20 failures recorded. @@ -535,7 +535,7 @@ emp_no:integer |job_positions:keyword ; greaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions > "C" | keep emp_no, job_positions | sort emp_no | limit 6; warning:Line 1:24: evaluation of [job_positions > \"C\"] failed, treating result as null. Only first 20 failures recorded. @@ -552,7 +552,7 @@ emp_no:integer |job_positions:keyword ; equalToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions == "Accountant" | keep emp_no, job_positions | sort emp_no; warning:Line 1:24: evaluation of [job_positions == \"Accountant\"] failed, treating result as null. Only first 20 failures recorded. @@ -564,7 +564,7 @@ emp_no:integer |job_positions:keyword ; equalToOrEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions == "Accountant" or job_positions == "Tech Lead" | keep emp_no, job_positions | sort emp_no; warning:Line 1:24: evaluation of [job_positions] failed, treating result as null. Only first 20 failures recorded. @@ -577,7 +577,7 @@ emp_no:integer |job_positions:keyword ; inMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where job_positions in ("Accountant", "Tech Lead") | keep emp_no, job_positions | sort emp_no; warning:Line 1:24: evaluation of [job_positions in (\"Accountant\", \"Tech Lead\")] failed, treating result as null. Only first 20 failures recorded. 
@@ -590,7 +590,7 @@ emp_no:integer |job_positions:keyword ; notLessThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(job_positions < "C") | keep emp_no, job_positions | sort emp_no | limit 6; warning:Line 1:24: evaluation of [not(job_positions < \"C\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions < \"C\"] failed, treating result as null. Only first 20 failures recorded.] @@ -607,7 +607,7 @@ emp_no:integer |job_positions:keyword ; notGreaterThanMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(job_positions > "C") | keep emp_no, job_positions | sort emp_no | limit 6; warning:Line 1:24: evaluation of [not(job_positions > \"C\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions > \"C\"] failed, treating result as null. Only first 20 failures recorded.] @@ -620,7 +620,7 @@ emp_no:integer |job_positions:keyword ; notEqualToMultivalue -required_feature: esql.mv_warn +required_capability: mv_warn from employees | where not(job_positions == "Accountant") | keep emp_no, job_positions | sort emp_no | limit 6; warning:Line 1:24: evaluation of [not(job_positions == \"Accountant\")] failed, treating result as null. Only first 20 failures recorded.#[Emulated:Line 1:28: evaluation of [job_positions == \"Accountant\"] failed, treating result as null. Only first 20 failures recorded.] 
@@ -745,7 +745,7 @@ ROW a=[10, 9, 8] ; mvSort -required_feature: esql.mv_sort +required_capability: mv_sort row a = ["Mon", "Tues", "Wed", "Thu", "Fri"] | eval sa = mv_sort(a), sd = mv_sort(a, "DESC"); @@ -754,7 +754,7 @@ a:keyword | sa:keyword | sd:keyword ; mvSortEmp -required_feature: esql.mv_sort +required_capability: mv_sort FROM employees | eval sd = mv_sort(job_positions, "DESC"), sa = mv_sort(job_positions) @@ -772,7 +772,7 @@ emp_no:integer | job_positions:keyword ; mvSliceCast -required_feature: esql.string_literal_auto_casting +required_capability: string_literal_auto_casting ROW a = ["1", "2", "3", "4"] | eval a1 = mv_slice(a, "0", "1"); @@ -782,7 +782,7 @@ a:keyword | a1:keyword ; mvSliceEmp -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval a1 = mv_slice(salary_change.keyword, 0, 1) @@ -799,7 +799,7 @@ emp_no:integer | salary_change.keyword:keyword | a1:keyword ; mvZip -required_feature: esql.mv_sort +required_capability: mv_sort // tag::mv_zip[] ROW a = ["x", "y", "z"], b = ["1", "2"] @@ -815,7 +815,7 @@ a:keyword | b:keyword | c:keyword ; mvZipEmp -required_feature: esql.mv_sort +required_capability: mv_sort from employees | eval full_name = mv_zip(first_name, last_name, " "), full_name_2 = mv_zip(last_name, first_name), jobs = mv_zip(job_positions, salary_change.keyword, "#") @@ -842,7 +842,7 @@ beta | Kubernetes cluster | [beta k8s server, beta k8s server2 ; lengthOfText -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = length(host_group), l2 = length(description) | keep l1, l2; ignoreOrder:true @@ -856,7 +856,7 @@ null | 19 ; startsWithText -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = starts_with(host_group, host), l2 = starts_with(description, host) | keep l1, l2; ignoreOrder:true @@ -870,7 +870,7 @@ false | null ; substringOfText -required_feature: esql.mv_warn +required_capability: mv_warn 
from hosts | where host=="epsilon" | eval l1 = substring(host_group, 0, 5), l2 = substring(description, 0, 5) | keep l1, l2; ignoreOrder:true @@ -884,7 +884,7 @@ Gatew | null ; concatOfText -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where host == "epsilon" | eval l1 = concat(host, "/", host_group), l2 = concat(host_group, "/", description) | sort l1 | keep l1, l2; warning:Line 1:86: evaluation of [concat(host_group, \"/\", description)] failed, treating result as null. Only first 20 failures recorded. @@ -1150,7 +1150,7 @@ a:keyword | upper:keyword | lower:keyword ; values -required_feature: esql.agg_values +required_capability: agg_values FROM employees | WHERE emp_no <= 10009 @@ -1162,7 +1162,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values // tag::values-grouped[] FROM employees @@ -1314,7 +1314,7 @@ min(f_l):integer | max(f_l):integer | job_positions:keyword ; locateWarnings#[skip:-8.13.99,reason:new string function added in 8.14] -required_feature: esql.mv_warn +required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = locate(host_group, "ate"), l2 = locate(description, "ate") | keep l1, l2; ignoreOrder:true @@ -1328,7 +1328,7 @@ null | 0 ; base64Encode#[skip:-8.13.99,reason:new base64 function added in 8.14] -required_feature: esql.base64_decode_encode +required_capability: base64_decode_encode // tag::to_base64[] row a = "elastic" @@ -1343,7 +1343,7 @@ elastic | ZWxhc3RpYw== ; base64Decode#[skip:-8.13.99,reason:new base64 function added in 8.14] -required_feature: esql.base64_decode_encode +required_capability: base64_decode_encode // tag::from_base64[] row a = "ZWxhc3RpYw==" @@ -1358,7 +1358,7 @@ ZWxhc3RpYw== | elastic ; base64EncodeDecodeEmp#[skip:-8.13.99,reason:new base64 function added in 8.14] -required_feature: esql.base64_decode_encode +required_capability: base64_decode_encode from employees | where emp_no < 10032 and emp_no > 
10027 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index fa524d270bb9..38f3d439e750 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -46,7 +46,7 @@ from ul_logs | sort bytes_in desc nulls last, id | limit 12; ; filterPushDownGT -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | where bytes_in >= to_ul(74330435873664882) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; warning:Line 1:22: evaluation of [bytes_in >= to_ul(74330435873664882)] failed, treating result as null. Only first 20 failures recorded. @@ -68,7 +68,7 @@ warning:Line 1:22: java.lang.IllegalArgumentException: single-value function enc ; filterPushDownRange -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | where bytes_in >= to_ul(74330435873664882) | where bytes_in <= to_ul(316080452389500167) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; warning:Line 1:22: evaluation of [bytes_in >= to_ul(74330435873664882)] failed, treating result as null. Only first 20 failures recorded. 
@@ -84,7 +84,7 @@ warning:#[Emulated:Line 1:67: java.lang.IllegalArgumentException: single-value f ; filterPushDownIn -required_feature: esql.mv_warn +required_capability: mv_warn // TODO: testing framework doesn't perform implicit conversion to UL of given values, needs explicit conversion from ul_logs | where bytes_in in (to_ul(74330435873664882), to_ul(154551962150890564), to_ul(195161570976258241)) | sort bytes_in | keep bytes_in, id; @@ -98,7 +98,7 @@ warning:Line 1:22: java.lang.IllegalArgumentException: single-value function enc ; filterOnFieldsEquality -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | where bytes_in == bytes_out; warning:Line 1:22: evaluation of [bytes_in == bytes_out] failed, treating result as null. Only first 20 failures recorded. @@ -109,7 +109,7 @@ warning:Line 1:22: java.lang.IllegalArgumentException: single-value function enc ; filterOnFieldsInequality -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | sort id | where bytes_in < bytes_out | eval b_in = bytes_in / to_ul(pow(10.,15)), b_out = bytes_out / to_ul(pow(10.,15)) | limit 5; warning:Line 1:32: evaluation of [bytes_in < bytes_out] failed, treating result as null. Only first 20 failures recorded. @@ -140,7 +140,7 @@ from ul_logs | stats c = count(bytes_in) by bytes_in | sort c desc, bytes_in des ; case -required_feature: esql.mv_warn +required_capability: mv_warn from ul_logs | where case(bytes_in == to_ul(154551962150890564), true, false); warning:Line 1:27: evaluation of [bytes_in == to_ul(154551962150890564)] failed, treating result as null. Only first 20 failures recorded. 
@@ -151,7 +151,7 @@ warning:Line 1:27: java.lang.IllegalArgumentException: single-value function enc ; toDegrees -required_feature: esql.mv_warn +required_capability: mv_warn FROM ul_logs | WHERE bytes_in == bytes_out | EVAL deg = TO_DEGREES(bytes_in) | KEEP bytes_in, deg ; @@ -163,7 +163,7 @@ warning:Line 1:22: java.lang.IllegalArgumentException: single-value function enc ; toRadians -required_feature: esql.mv_warn +required_capability: mv_warn FROM ul_logs | WHERE bytes_in == bytes_out | EVAL rad = TO_RADIANS(bytes_in) | KEEP bytes_in, rad ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec index 513189cc0fe8..3b6c41f88301 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/version.csv-spec @@ -312,7 +312,7 @@ null | null | null | 11 | 0 | 1.3.0 | 0.1 | no ; values -required_feature: esql.agg_values +required_capability: agg_values FROM apps | STATS version=MV_SORT(VALUES(version)) @@ -323,7 +323,7 @@ required_feature: esql.agg_values ; valuesGrouped -required_feature: esql.agg_values +required_capability: agg_values FROM apps | EVAL name=SUBSTRING(name, 0, 1) @@ -348,7 +348,7 @@ version:version | name:keyword ; valuesGroupedByOrdinals -required_feature: esql.agg_values +required_capability: agg_values FROM apps | STATS version=MV_SORT(VALUES(version)) BY name @@ -372,7 +372,7 @@ version:version | name:keyword ; implictCastingEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from apps | where version == "1.2.3.4" | sort name | keep name, version; name:keyword | version:version @@ -381,7 +381,7 @@ hhhhh | 1.2.3.4 ; implictCastingNotEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from apps | where version != "1.2.3.4" 
| sort name, version | keep name, version | limit 2; name:keyword | version:version @@ -390,7 +390,7 @@ bbbbb | 2.1 ; implictCastingGreaterThan -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from apps | where version > "1.2.3.4" | sort name, version | keep name, version | limit 2; name:keyword | version:version @@ -399,7 +399,7 @@ ccccc | 2.3.4 ; implictCastingLessThanOrEqual -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from apps | where version <= "1.2.3.4" | sort name, version | keep name, version | limit 2; name:keyword | version:version @@ -408,7 +408,7 @@ aaaaa | 1.2.3.4 ; implictCastingIn -required_feature: esql.string_literal_auto_casting_extended +required_capability: string_literal_auto_casting_extended from apps | where version in ( "1.2.3.4", "bad" ) | sort name | keep name, version; name:keyword | version:version diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java index 45d3653a28b6..af4595c5bbd7 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/CsvSpecReader.java @@ -43,8 +43,8 @@ public Object parse(String line) { if (line.startsWith(SCHEMA_PREFIX)) { assertThat("Early schema already declared " + earlySchema, earlySchema.length(), is(0)); earlySchema.append(line.substring(SCHEMA_PREFIX.length()).trim()); - } else if (line.toLowerCase(Locale.ROOT).startsWith("required_feature:")) { - requiredCapabilities.add(line.substring("required_feature:".length()).trim().replace("esql.", "")); + } else if (line.toLowerCase(Locale.ROOT).startsWith("required_capability:")) { + 
requiredCapabilities.add(line.substring("required_capability:".length()).trim()); } else { if (line.endsWith(";")) { // pick up the query From 1d62a7bb5ec620bfd47acdebcdaa15e2b2227a27 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 13 May 2024 13:02:24 -0400 Subject: [PATCH 099/119] ESQL: Fix logging test (#108520) This fixes the logging tests when run against multiple nodes. Closes #108367 --- .../xpack/esql/qa/single_node/RestEsqlIT.java | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index 072dc5265fe6..7c57212d0f57 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -118,15 +118,16 @@ public void testDoNotLogWithInfo() throws IOException { Map colA = Map.of("name", "DO_NOT_LOG_ME", "type", "integer"); assertEquals(List.of(colA), result.get("columns")); assertEquals(List.of(List.of(1)), result.get("values")); - try (InputStream log = cluster.getNodeLog(0, LogType.SERVER)) { - Streams.readAllLines(log, line -> { assertThat(line, not(containsString("DO_NOT_LOG_ME"))); }); + for (int i = 0; i < cluster.getNumNodes(); i++) { + try (InputStream log = cluster.getNodeLog(i, LogType.SERVER)) { + Streams.readAllLines(log, line -> assertThat(line, not(containsString("DO_NOT_LOG_ME")))); + } } } finally { setLoggingLevel(null); } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108367") public void testDoLogWithDebug() throws IOException { try { setLoggingLevel("DEBUG"); @@ -136,15 +137,17 @@ public void testDoLogWithDebug() throws IOException { Map 
colA = Map.of("name", "DO_LOG_ME", "type", "integer"); assertEquals(List.of(colA), result.get("columns")); assertEquals(List.of(List.of(1)), result.get("values")); - try (InputStream log = cluster.getNodeLog(0, LogType.SERVER)) { - boolean[] found = new boolean[] { false }; - Streams.readAllLines(log, line -> { - if (line.contains("DO_LOG_ME")) { - found[0] = true; - } - }); - assertThat(found[0], equalTo(true)); + boolean[] found = new boolean[] { false }; + for (int i = 0; i < cluster.getNumNodes(); i++) { + try (InputStream log = cluster.getNodeLog(i, LogType.SERVER)) { + Streams.readAllLines(log, line -> { + if (line.contains("DO_LOG_ME")) { + found[0] = true; + } + }); + } } + assertThat(found[0], equalTo(true)); } finally { setLoggingLevel(null); } From 9977af92aae7f7e93ee1eda0ccecf954cb8d1a74 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Mon, 13 May 2024 19:14:29 +0200 Subject: [PATCH 100/119] Add more values to estimated sizes for ES|QL field types (#108412) * Add more values to estimated sizes for ES|QL field types * Increased shape estimate to 200 and added version estimate --- .../xpack/esql/plan/physical/EstimatesRowSize.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java index b79d7cc0fbdd..3d626e65f6f1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EstimatesRowSize.java @@ -13,7 +13,6 @@ import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.type.DataType; -import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.List; @@ -106,12 +105,13 @@ static int estimateSize(DataType 
dataType) { ElementType elementType = PlannerUtils.toElementType(dataType); return switch (elementType) { case BOOLEAN -> 1; - case BYTES_REF -> { - if (dataType == DataTypes.IP) { - yield 16; - } - yield 50; // wild estimate for the size of a string. - } + case BYTES_REF -> switch (dataType.typeName()) { + case "ip" -> 16; // IP addresses, both IPv4 and IPv6, are encoded using 16 bytes. + case "version" -> 15; // 8.15.2-SNAPSHOT is 15 bytes, most are shorter, some can be longer + case "geo_point", "cartesian_point" -> 21; // WKB for points is typically 21 bytes. + case "geo_shape", "cartesian_shape" -> 200; // wild estimate, based on some test data (airport_city_boundaries) + default -> 50; // wild estimate for the size of a string. + }; case DOC -> throw new EsqlIllegalArgumentException("can't load a [doc] with field extraction"); case DOUBLE -> Double.BYTES; case INT -> Integer.BYTES; From 89d666bf903b2bc632938d8d878e22a13a3c03bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Mon, 13 May 2024 19:22:12 +0200 Subject: [PATCH 101/119] Adding override for lintian false positive on libvec.so (#108521) --- distribution/packages/src/deb/lintian/elasticsearch | 4 ++++ docs/changelog/108521.yaml | 6 ++++++ 2 files changed, 10 insertions(+) create mode 100644 docs/changelog/108521.yaml diff --git a/distribution/packages/src/deb/lintian/elasticsearch b/distribution/packages/src/deb/lintian/elasticsearch index a6a46bb41f11..edd705b66caa 100644 --- a/distribution/packages/src/deb/lintian/elasticsearch +++ b/distribution/packages/src/deb/lintian/elasticsearch @@ -59,3 +59,7 @@ unknown-field License # don't build them ourselves and the license precludes us modifying them # to fix this. 
library-not-linked-against-libc usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so + +# shared-lib-without-dependency-information (now shared-library-lacks-prerequisites) is falsely reported for libvec.so +# which has no dependencies (not even libc) besides the symbols in the base executable. +shared-lib-without-dependency-information usr/share/elasticsearch/lib/platform/linux-x64/libvec.so diff --git a/docs/changelog/108521.yaml b/docs/changelog/108521.yaml new file mode 100644 index 000000000000..adc7c11a4dec --- /dev/null +++ b/docs/changelog/108521.yaml @@ -0,0 +1,6 @@ +pr: 108521 +summary: Adding override for lintian false positive on `libvec.so` +area: "Packaging" +type: bug +issues: + - 108514 From e64f1b6317a22b3de3a9a42550ed51036bf77b68 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 13 May 2024 20:17:35 +0200 Subject: [PATCH 102/119] Add internalClusterTest for and fix leak in ExpandSearchPhase (#108562) `ExpandSearchPhase` was leaking `SearchHits` when a pooled `SearchHits` that was read from the wire was added to an unpooled `SearchHit`. This commit makes the relevant `SearchHit` instances that need to be pooled so they released nested hits, pooled. This requires a couple of smaller adjustments in the codebase, mainly around error handling. 
--- docs/changelog/108562.yaml | 6 +++ .../search/CollapseSearchResultsIT.java | 42 +++++++++++++++++++ .../action/search/ExpandSearchPhase.java | 1 + .../search/fetch/FetchPhase.java | 32 +++++++++----- .../search/fetch/FetchPhaseDocsIterator.java | 1 + .../search/fetch/FetchSearchResult.java | 7 +++- .../search/fetch/subphase/InnerHitsPhase.java | 1 + 7 files changed, 79 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/108562.yaml create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/search/CollapseSearchResultsIT.java diff --git a/docs/changelog/108562.yaml b/docs/changelog/108562.yaml new file mode 100644 index 000000000000..2a0047fe807f --- /dev/null +++ b/docs/changelog/108562.yaml @@ -0,0 +1,6 @@ +pr: 108562 +summary: Add `internalClusterTest` for and fix leak in `ExpandSearchPhase` +area: Search +type: bug +issues: + - 108369 diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/CollapseSearchResultsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/CollapseSearchResultsIT.java new file mode 100644 index 000000000000..a12a26d69c5f --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/CollapseSearchResultsIT.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.search; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.query.InnerHitBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.search.collapse.CollapseBuilder; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; + +public class CollapseSearchResultsIT extends ESIntegTestCase { + + public void testCollapse() { + final String indexName = "test_collapse"; + createIndex(indexName); + final String collapseField = "collapse_field"; + assertAcked(indicesAdmin().preparePutMapping(indexName).setSource(collapseField, "type=keyword")); + index(indexName, "id_1", Map.of(collapseField, "value1")); + index(indexName, "id_2", Map.of(collapseField, "value2")); + refresh(indexName); + assertNoFailuresAndResponse( + prepareSearch(indexName).setQuery(new MatchAllQueryBuilder()) + .setCollapse(new CollapseBuilder(collapseField).setInnerHits(new InnerHitBuilder("ih").setSize(2))), + searchResponse -> { + assertEquals(collapseField, searchResponse.getHits().getCollapseField()); + assertEquals(Set.of(new BytesRef("value1"), new BytesRef("value2")), Set.of(searchResponse.getHits().getCollapseValues())); + } + ); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index 48c2f1890ba0..e8470ba77632 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -101,6 +101,7 @@ private void doRun() { hit.setInnerHits(Maps.newMapWithExpectedSize(innerHitBuilders.size())); } hit.getInnerHits().put(innerHitBuilder.getName(), innerHits); + 
assert innerHits.isPooled() == false || hit.isPooled() : "pooled inner hits can only be added to a pooled hit"; innerHits.mustIncRef(); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 4b5c647da0c9..0c54e8ff8958 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -167,23 +167,35 @@ protected SearchHit nextDoc(int doc) throws IOException { leafSourceLoader, leafIdLoader ); - sourceProvider.source = hit.source(); - fieldLookupProvider.setPreloadedStoredFieldValues(hit.hit().getId(), hit.loadedFields()); - for (FetchSubPhaseProcessor processor : processors) { - processor.process(hit); + boolean success = false; + try { + sourceProvider.source = hit.source(); + fieldLookupProvider.setPreloadedStoredFieldValues(hit.hit().getId(), hit.loadedFields()); + for (FetchSubPhaseProcessor processor : processors) { + processor.process(hit); + } + success = true; + return hit.hit(); + } finally { + if (success == false) { + hit.hit().decRef(); + } } - return hit.hit(); } }; SearchHit[] hits = docsIterator.iterate(context.shardTarget(), context.searcher().getIndexReader(), docIdsToLoad); if (context.isCancelled()) { + for (SearchHit hit : hits) { + // release all hits that would otherwise become owned and eventually released by SearchHits below + hit.decRef(); + } throw new TaskCancelledException("cancelled"); } TotalHits totalHits = context.getTotalHits(); - return SearchHits.unpooled(hits, totalHits, context.getMaxScore()); + return new SearchHits(hits, totalHits, context.getMaxScore()); } List getProcessors(SearchShardTarget target, FetchContext context, Profiler profiler) { @@ -257,12 +269,12 @@ private static HitContext prepareNonNestedHitContext( String id = idLoader.getId(subDocId); if (id == null) { - // TODO: can we use pooled buffers here as well? 
- SearchHit hit = SearchHit.unpooled(docId, null); + SearchHit hit = new SearchHit(docId); + // TODO: can we use real pooled buffers here as well? Source source = Source.lazy(lazyStoredSourceLoader(profiler, subReaderContext, subDocId)); return new HitContext(hit, subReaderContext, subDocId, Map.of(), source); } else { - SearchHit hit = SearchHit.unpooled(docId, id); + SearchHit hit = new SearchHit(docId, id); Source source; if (requiresSource) { Timer timer = profiler.startLoadingSource(); @@ -339,7 +351,7 @@ private static HitContext prepareNestedHitContext( assert nestedIdentity != null; Source nestedSource = nestedIdentity.extractSource(rootSource); - SearchHit hit = SearchHit.unpooled(topDocId, rootId, nestedIdentity); + SearchHit hit = new SearchHit(topDocId, rootId, nestedIdentity); return new HitContext(hit, subReaderContext, nestedInfo.doc(), childFieldLoader.storedFields(), nestedSource); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java index cc39113f2009..81b3e7465fee 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhaseDocsIterator.java @@ -67,6 +67,7 @@ public final SearchHit[] iterate(SearchShardTarget shardTarget, IndexReader inde setNextReader(ctx, docsInLeaf); } currentDoc = docs[i].docId; + assert searchHits[docs[i].index] == null; searchHits[docs[i].index] = nextDoc(docs[i].docId); } } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java index 4c3d3948ff88..4170f7e2f8b4 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchSearchResult.java @@ -61,8 +61,13 @@ public FetchSearchResult 
fetchResult() { public void shardResult(SearchHits hits, ProfileResult profileResult) { assert assertNoSearchTarget(hits); + assert hasReferences(); + var existing = this.hits; + if (existing != null) { + existing.decRef(); + } this.hits = hits; - hits.incRef(); + hits.mustIncRef(); assert this.profileResult == null; this.profileResult = profileResult; } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java index ccb54801472a..a4ba982e1dd7 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java @@ -104,6 +104,7 @@ private void hitExecute(Map innerHi } } var h = fetchResult.hits(); + assert hit.isPooled() || h.isPooled() == false; results.put(entry.getKey(), h); h.mustIncRef(); } From 6884002aabbdc161c2cf958a5a41a41389fb531a Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 13 May 2024 14:46:54 -0400 Subject: [PATCH 103/119] ESQL: Disable heap attack eval (#108581) It started failing again today. 
Relates https://github.com/elastic/elasticsearch-serverless/issues/1874 --- .../org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 5c034a81fc9c..e693f9a1562f 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -269,6 +269,7 @@ public void testManyEval() throws IOException { assertMap(map, matchesMap().entry("columns", columns).entry("values", hasSize(10_000))); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch-serverless/issues/1874") public void testTooManyEval() throws IOException { initManyLongs(); assertCircuitBreaks(() -> manyEval(490)); From 93ec9d6142b54a505c9ca817f607de9ae1d14ac1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Mon, 13 May 2024 21:19:41 +0200 Subject: [PATCH 104/119] [Test] Ignore closed connections on Windows hosts (#108362) This commit adds special handling for the `java.io.IOException: An established connection was aborted by the software in your host machine` in `Netty4HttpClient#exceptionCaught ` method. 
This exception only occurs when running tests on Windows hosts Resolves: #108193 --- .../org/elasticsearch/http/netty4/Netty4HttpClient.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java index 7ce962ff56b6..303521376658 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java @@ -40,6 +40,7 @@ import org.elasticsearch.transport.netty4.NettyAllocator; import java.io.Closeable; +import java.io.IOException; import java.net.SocketAddress; import java.net.SocketException; import java.nio.charset.StandardCharsets; @@ -203,7 +204,11 @@ protected void channelRead0(ChannelHandlerContext ctx, HttpObject msg) { @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { - if (cause instanceof PrematureChannelClosureException || cause instanceof SocketException) { + if (cause instanceof PrematureChannelClosureException + || cause instanceof SocketException + || (cause instanceof IOException + && cause.getMessage() != null + && cause.getMessage().contains("An established connection was aborted by the software in your host machine"))) { // no more requests coming, so fast-forward the latch fastForward(); } else { From 437acfaed1025b235b1901cf6fa1301a41fdf197 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 13 May 2024 21:07:08 +0100 Subject: [PATCH 105/119] `GetMlAutoscalingStats$Request` is not acknowledged (#108575) This request is using `AcknowledgedRequest` simply as a `MasterNodeRequest` which carries an extra timeout, but the timeout is not used for tracking acks of a cluster state update so we shouldn't be using `ackTimeout()` here. 
This commit changes it into a standard `MasterNodeRequest` plus an extra timeout, without changing its transport-protocol representation. --- .../core/ml/action/GetMlAutoscalingStats.java | 35 ++++++++++++++++--- .../GetMlAutoscalingStatsRequestTests.java | 7 ++-- 2 files changed, 35 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java index f57497368dec..c23d75f02937 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java @@ -7,9 +7,10 @@ package org.elasticsearch.xpack.core.ml.action; +import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.TimeValue; @@ -35,14 +36,33 @@ public GetMlAutoscalingStats() { super(NAME); } - public static class Request extends AcknowledgedRequest { + public static class Request extends MasterNodeRequest { + private final TimeValue requestTimeout; + + public Request(TimeValue masterNodeTimeout, TimeValue requestTimeout) { + super(masterNodeTimeout); + this.requestTimeout = Objects.requireNonNull(requestTimeout); + } + + @Deprecated(forRemoval = true) // temporary compatibility shim public Request(TimeValue timeout) { - super(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); + this(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); } public Request(StreamInput in) throws IOException { super(in); +
this.requestTimeout = in.readTimeValue(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeTimeValue(this.requestTimeout); + } + + public TimeValue requestTimeout() { + return requestTimeout; } @Override @@ -50,9 +70,14 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, return new CancellableTask(id, type, action, "get_ml_autoscaling_resources", parentTaskId, headers); } + @Override + public ActionRequestValidationException validate() { + return null; + } + @Override public int hashCode() { - return Objects.hash(ackTimeout()); + return Objects.hash(requestTimeout); } @Override @@ -64,7 +89,7 @@ public boolean equals(Object obj) { return false; } GetMlAutoscalingStats.Request other = (GetMlAutoscalingStats.Request) obj; - return Objects.equals(ackTimeout(), other.ackTimeout()); + return Objects.equals(requestTimeout, other.requestTimeout); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java index ee265538829d..eb0b8420625a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStatsRequestTests.java @@ -23,11 +23,14 @@ protected Writeable.Reader instanceReader() { @Override protected Request createTestInstance() { - return new Request(randomTimeValue(0, 10_000)); + return new Request(TimeValue.THIRTY_SECONDS, randomTimeValue(0, 10_000)); } @Override protected Request mutateInstance(Request instance) throws IOException { - return new Request(TimeValue.timeValueMillis(instance.ackTimeout().millis() + randomIntBetween(1, 1000))); + return new Request( + TimeValue.THIRTY_SECONDS, + 
TimeValue.timeValueMillis(instance.requestTimeout().millis() + randomIntBetween(1, 1000)) + ); } } From 33009d443bbbb93a738c74c5aa67551b15ed8a75 Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Mon, 13 May 2024 16:44:58 -0400 Subject: [PATCH 106/119] Add distrib. arch. guide intro and autoscaling basics (#107284) --- docs/internal/DistributedArchitectureGuide.md | 110 +++++++++++++++++- 1 file changed, 105 insertions(+), 5 deletions(-) diff --git a/docs/internal/DistributedArchitectureGuide.md b/docs/internal/DistributedArchitectureGuide.md index 59305c630573..732e2e7be46f 100644 --- a/docs/internal/DistributedArchitectureGuide.md +++ b/docs/internal/DistributedArchitectureGuide.md @@ -1,6 +1,14 @@ -# Distributed Area Team Internals +# Distributed Area Internals -(Summary, brief discussion of our features) +The Distributed Area contains indexing and coordination systems. + +The index path stretches from the user REST command through shard routing down to each individual shard's translog and storage +engine. Reindexing is effectively reading from a source index and writing to a destination index (perhaps on different nodes). +The coordination side includes cluster coordination, shard allocation, cluster autoscaling stats, task management, and cross +cluster replication. Less obvious coordination systems include networking, the discovery plugin system, the snapshot/restore +logic, and shard recovery. + +A guide to the general Elasticsearch components can be found [here](https://github.com/elastic/elasticsearch/blob/main/docs/internal/GeneralArchitectureGuide.md). # Networking @@ -237,9 +245,101 @@ works in parallel with the storage engine.) # Autoscaling -(Reactive and proactive autoscaling. Explain that we surface recommendations, how control plane uses it.) - -(Sketch / list the different deciders that we have, and then also how we use information from each to make a recommendation.) 
+The Autoscaling API in ES (Elasticsearch) uses cluster and node level statistics to provide a recommendation +for a cluster size to support the current cluster data and active workloads. ES Autoscaling is paired +with an ES Cloud service that periodically polls the ES elected master node for suggested cluster +changes. The cloud service will add more resources to the cluster based on Elasticsearch's recommendation. +Elasticsearch by itself cannot automatically scale. + +Autoscaling recommendations are tailored for the user [based on user defined policies][], composed of data +roles (hot, frozen, etc) and [deciders][]. There's a public [webinar on autoscaling][], as well as the +public [Autoscaling APIs] docs. + +Autoscaling's current implementation is based primarily on storage requirements, as well as memory capacity +for ML and frozen tier. It does not yet support scaling related to search load. Paired with ES Cloud, +autoscaling only scales upward, not downward, except for ML nodes that do get scaled up _and_ down. + +[based on user defined policies]: https://www.elastic.co/guide/en/elasticsearch/reference/current/xpack-autoscaling.html +[deciders]: https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-deciders.html +[webinar on autoscaling]: https://www.elastic.co/webinars/autoscaling-from-zero-to-production-seamlessly +[Autoscaling APIs]: https://www.elastic.co/guide/en/elasticsearch/reference/current/autoscaling-apis.html + +### Plugin REST and TransportAction entrypoints + +Autoscaling is a [plugin][]. All the REST APIs can be found in [autoscaling/rest/][]. +`GetAutoscalingCapacityAction` is the capacity calculation operation REST endpoint, as opposed to the +other rest commands that get/set/delete the policies guiding the capacity calculation.
The Transport +Actions can be found in [autoscaling/action/], where [TransportGetAutoscalingCapacityAction][] is the +entrypoint on the master node for calculating the optimal cluster resources based on the autoscaling +policies. + +[plugin]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java#L72 +[autoscaling/rest/]: https://github.com/elastic/elasticsearch/tree/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/rest +[autoscaling/action/]: https://github.com/elastic/elasticsearch/tree/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action +[TransportGetAutoscalingCapacityAction]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportGetAutoscalingCapacityAction.java#L82-L98 + +### How cluster capacity is determined + +[AutoscalingMetadata][] implements [Metadata.Custom][] in order to persist autoscaling policies. Each +Decider is an implementation of [AutoscalingDeciderService][]. The [AutoscalingCalculateCapacityService][] +is responsible for running the calculation. + +[TransportGetAutoscalingCapacityAction.computeCapacity] is the entry point to [AutoscalingCalculateCapacityService.calculate], +which creates a [AutoscalingDeciderResults][] for [each autoscaling policy][]. [AutoscalingDeciderResults.toXContent][] then +determines the [maximum required capacity][] to return to the caller. [AutoscalingCapacity][] is the base unit of a cluster +resources recommendation. + +The `TransportGetAutoscalingCapacityAction` response is cached to prevent concurrent callers +overloading the system: the operation is expensive. `TransportGetAutoscalingCapacityAction` contains +a [CapacityResponseCache][]. 
`TransportGetAutoscalingCapacityAction.masterOperation` +calls [through the CapacityResponseCache][], into the `AutoscalingCalculateCapacityService`, to handle +concurrent callers. + +[AutoscalingMetadata]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/AutoscalingMetadata.java#L38 +[Metadata.Custom]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java#L141-L145 +[AutoscalingDeciderService]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingDeciderService.java#L16-L19 +[AutoscalingCalculateCapacityService]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityService.java#L43 + +[TransportGetAutoscalingCapacityAction.computeCapacity]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportGetAutoscalingCapacityAction.java#L102-L108 +[AutoscalingCalculateCapacityService.calculate]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityService.java#L108-L139 +[AutoscalingDeciderResults]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingDeciderResults.java#L34-L38 +[each autoscaling policy]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCalculateCapacityService.java#L124-L131 +[AutoscalingDeciderResults.toXContent]: 
https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingDeciderResults.java#L78 +[maximum required capacity]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingDeciderResults.java#L105-L116 +[AutoscalingCapacity]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/capacity/AutoscalingCapacity.java#L27-L35 + +[CapacityResponseCache]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportGetAutoscalingCapacityAction.java#L44-L47 +[through the CapacityResponseCache]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/action/TransportGetAutoscalingCapacityAction.java#L97 + +### Where the data comes from + +The Deciders each pull data from different sources as needed to inform their decisions. The +[DiskThresholdMonitor][] is one such data source. The Monitor runs on the master node and maintains +lists of nodes that exceed various disk size thresholds. [DiskThresholdSettings][] contains the +threshold settings with which the `DiskThresholdMonitor` runs. + +[DiskThresholdMonitor]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java#L53-L58 +[DiskThresholdSettings]: https://github.com/elastic/elasticsearch/blob/v8.13.2/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java#L24-L27 + +### Deciders + +The `ReactiveStorageDeciderService` tracks information that demonstrates storage limitations are causing +problems in the cluster. It uses [an algorithm defined here][]. 
Some examples are +- information from the `DiskThresholdMonitor` to find out whether nodes are exceeding their storage capacity +- number of unassigned shards that failed allocation because of insufficient storage +- the max shard size and minimum node size, and whether these can be satisfied with the existing infrastructure + +[an algorithm defined here]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java#L158-L176 + +The `ProactiveStorageDeciderService` maintains a forecast window that [defaults to 30 minutes][]. It only +runs on data streams (ILM, rollover, etc), not regular indexes. It looks at past [index changes][] that +took place within the forecast window to [predict][] resources that will be needed shortly. + +[defaults to 30 minutes]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageDeciderService.java#L32 +[index changes]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageDeciderService.java#L79-L83 +[predict]: https://github.com/elastic/elasticsearch/blob/v8.13.2/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageDeciderService.java#L85-L95 + +There are several more Decider Services, implementing the `AutoscalingDeciderService` interface. 
# Snapshot / Restore From 1059a0f3ff539dd27359fdbd352c4081ab80b16d Mon Sep 17 00:00:00 2001 From: Dianna Hohensee Date: Mon, 13 May 2024 16:53:21 -0400 Subject: [PATCH 107/119] Make max shard limit variable public, and clearer variable names (#108275) Relates ES-8244 --- .../DataStreamIndexSettingsProvider.java | 25 +++++++++++-------- .../cluster/metadata/IndexMetadata.java | 16 +++++++----- .../cluster/routing/allocation/DataTier.java | 9 ++++--- .../index/IndexSettingProvider.java | 13 +++++----- 4 files changed, 37 insertions(+), 26 deletions(-) diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java index 88e529ec5569..f5fa0db83923 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettingProvider; @@ -56,11 +57,11 @@ public class DataStreamIndexSettingsProvider implements IndexSettingProvider { @Override public Settings getAdditionalIndexSettings( String indexName, - String dataStreamName, - boolean timeSeries, + @Nullable String dataStreamName, + boolean isTimeSeries, Metadata metadata, Instant resolvedAt, - Settings allSettings, + Settings indexTemplateAndCreateRequestSettings, List combinedTemplateMappings ) { if (dataStreamName != null) { @@ -70,13 +71,13 @@ public Settings getAdditionalIndexSettings( // so checking that index_mode==null|standard and templateIndexMode == TIME_SERIES boolean migrating = 
dataStream != null && (dataStream.getIndexMode() == null || dataStream.getIndexMode() == IndexMode.STANDARD) - && timeSeries; + && isTimeSeries; IndexMode indexMode; if (migrating) { indexMode = IndexMode.TIME_SERIES; } else if (dataStream != null) { - indexMode = timeSeries ? dataStream.getIndexMode() : null; - } else if (timeSeries) { + indexMode = isTimeSeries ? dataStream.getIndexMode() : null; + } else if (isTimeSeries) { indexMode = IndexMode.TIME_SERIES; } else { indexMode = null; @@ -84,8 +85,8 @@ public Settings getAdditionalIndexSettings( if (indexMode != null) { if (indexMode == IndexMode.TIME_SERIES) { Settings.Builder builder = Settings.builder(); - TimeValue lookAheadTime = DataStreamsPlugin.getLookAheadTime(allSettings); - TimeValue lookBackTime = DataStreamsPlugin.LOOK_BACK_TIME.get(allSettings); + TimeValue lookAheadTime = DataStreamsPlugin.getLookAheadTime(indexTemplateAndCreateRequestSettings); + TimeValue lookBackTime = DataStreamsPlugin.LOOK_BACK_TIME.get(indexTemplateAndCreateRequestSettings); final Instant start; final Instant end; if (dataStream == null || migrating) { @@ -114,9 +115,13 @@ public Settings getAdditionalIndexSettings( builder.put(IndexSettings.TIME_SERIES_START_TIME.getKey(), FORMATTER.format(start)); builder.put(IndexSettings.TIME_SERIES_END_TIME.getKey(), FORMATTER.format(end)); - if (allSettings.hasValue(IndexMetadata.INDEX_ROUTING_PATH.getKey()) == false + if (indexTemplateAndCreateRequestSettings.hasValue(IndexMetadata.INDEX_ROUTING_PATH.getKey()) == false && combinedTemplateMappings.isEmpty() == false) { - List routingPaths = findRoutingPaths(indexName, allSettings, combinedTemplateMappings); + List routingPaths = findRoutingPaths( + indexName, + indexTemplateAndCreateRequestSettings, + combinedTemplateMappings + ); if (routingPaths.isEmpty() == false) { builder.putList(INDEX_ROUTING_PATH.getKey(), routingPaths); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java 
b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 678655252248..681dcb3e314e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -189,13 +189,17 @@ public void writeTo(StreamOutput out) throws IOException { } } + /** + * This is a safety limit that should only be exceeded in very rare and special cases. The assumption is that + * 99% of the users have less than 1024 shards per index. We also make it a hard check that requires restart of nodes + * if a cluster should allow to create more than 1024 shards per index. NOTE: this does not limit the number of shards + * per cluster. this also prevents creating stuff like a new index with millions of shards by accident which essentially + * kills the entire cluster with OOM on the spot. + */ + public static final String PER_INDEX_MAX_NUMBER_OF_SHARDS = "1024"; + static Setting buildNumberOfShardsSetting() { - /* This is a safety limit that should only be exceeded in very rare and special cases. The assumption is that - * 99% of the users have less than 1024 shards per index. We also make it a hard check that requires restart of nodes - * if a cluster should allow to create more than 1024 shards per index. NOTE: this does not limit the number of shards - * per cluster. 
this also prevents creating stuff like a new index with millions of shards by accident which essentially - * kills the entire cluster with OOM on the spot.*/ - final int maxNumShards = Integer.parseInt(System.getProperty("es.index.max_number_of_shards", "1024")); + final int maxNumShards = Integer.parseInt(System.getProperty("es.index.max_number_of_shards", PER_INDEX_MAX_NUMBER_OF_SHARDS)); if (maxNumShards < 1) { throw new IllegalArgumentException("es.index.max_number_of_shards must be > 0"); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java index ebdf6e4b3d8e..3b1257a51074 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DataTier.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.snapshots.SearchableSnapshotsSettings; @@ -223,14 +224,14 @@ public static class DefaultHotAllocationSettingProvider implements IndexSettingP @Override public Settings getAdditionalIndexSettings( String indexName, - String dataStreamName, - boolean timeSeries, + @Nullable String dataStreamName, + boolean isTimeSeries, Metadata metadata, Instant resolvedAt, - Settings allSettings, + Settings indexTemplateAndCreateRequestSettings, List combinedTemplateMappings ) { - Set settings = allSettings.keySet(); + Set settings = indexTemplateAndCreateRequestSettings.keySet(); if (settings.contains(TIER_PREFERENCE)) { // just a marker -- this null value will be removed or overridden by the template/request settings return NULL_TIER_PREFERENCE_SETTINGS; diff --git 
a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java index e67196c9090c..bbf7cc3e0e1e 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettingProvider.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.MapperService; import java.io.IOException; @@ -31,20 +32,20 @@ public interface IndexSettingProvider { * @param indexName The name of the new index being created * @param dataStreamName The name of the data stream if the index being created is part of a data stream otherwise * null - * @param timeSeries Whether the template is in time series mode. + * @param isTimeSeries Whether the template is in time series mode. * @param metadata The current metadata instance that doesn't yet contain the index to be created * @param resolvedAt The time the request to create this new index was accepted. 
- * @param allSettings All the setting resolved from the template that matches and any setting defined on the create index - * request + * @param indexTemplateAndCreateRequestSettings All the settings resolved from the template that matches and any settings + * defined on the create index request * @param combinedTemplateMappings All the mappings resolved from the template that matches */ Settings getAdditionalIndexSettings( String indexName, - String dataStreamName, - boolean timeSeries, + @Nullable String dataStreamName, + boolean isTimeSeries, Metadata metadata, Instant resolvedAt, - Settings allSettings, + Settings indexTemplateAndCreateRequestSettings, List combinedTemplateMappings ); From e600d4186c927425c7bece685d8ed75b62a1413c Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Mon, 13 May 2024 22:55:58 +0200 Subject: [PATCH 108/119] Add feature flag for semantic_text non-snapshot YAML tests (#108585) --- .../main/java/org/elasticsearch/test/cluster/FeatureFlag.java | 3 ++- .../org/elasticsearch/xpack/inference/InferenceRestIT.java | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index 49fb38b518dc..d555337f467a 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -16,7 +16,8 @@ */ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), - FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null); + FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null), + SEMANTIC_TEXT_ENABLED("es.semantic_text_feature_flag_enabled=true", 
Version.fromString("8.15.0"), null); public final String systemProperty; public final Version from; diff --git a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java index a397d9864d23..2f6127c44957 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java +++ b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; @@ -22,6 +23,7 @@ public class InferenceRestIT extends ESClientYamlSuiteTestCase { .setting("xpack.security.enabled", "false") .setting("xpack.security.http.ssl.enabled", "false") .plugin("inference-service-test") + .feature(FeatureFlag.SEMANTIC_TEXT_ENABLED) .distribution(DistributionType.DEFAULT) .build(); From 2fbeb3ca831e2472950db4f9ce213fa272d22865 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 13 May 2024 16:34:14 -0700 Subject: [PATCH 109/119] Add Java 23 to testing matrix --- .buildkite/pipelines/periodic.template.yml | 2 ++ .buildkite/pipelines/periodic.yml | 2 ++ 2 files changed, 4 insertions(+) diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index fda4315926b6..207a332ed671 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ -88,6 +88,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 + - openjdk23 GRADLE_TASK: - checkPart1 - checkPart2 @@ -113,6 +114,7 @@ steps: - openjdk17 - 
openjdk21 - openjdk22 + - openjdk23 BWC_VERSION: $BWC_LIST agents: provider: gcp diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 7c5f683cf969..7ba46f0f0951 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -735,6 +735,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 + - openjdk23 GRADLE_TASK: - checkPart1 - checkPart2 @@ -760,6 +761,7 @@ steps: - openjdk17 - openjdk21 - openjdk22 + - openjdk23 BWC_VERSION: ["7.17.22", "8.13.5", "8.14.0", "8.15.0"] agents: provider: gcp From bf96968ea044658c9c7b813a1f608749abad6f04 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 13 May 2024 21:53:39 -0700 Subject: [PATCH 110/119] Debug docker readiness tests (#108587) --- .../test/java/org/elasticsearch/packaging/test/DockerTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index dc4e24959a5c..81ac8ab1200f 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -1211,7 +1211,6 @@ private List listPlugins() { /** * Check that readiness listener works */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/108523") public void test500Readiness() throws Exception { assertFalse(readinessProbe(9399)); // Disabling security so we wait for green @@ -1220,6 +1219,7 @@ public void test500Readiness() throws Exception { builder().envVar("readiness.port", "9399").envVar("xpack.security.enabled", "false").envVar("discovery.type", "single-node") ); waitForElasticsearch(installation); + dumpDebug(); assertTrue(readinessProbe(9399)); } From 8d9cd8965941679ffdf22bc0bb765bdf0963f521 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 14 May 2024 09:06:23 +0300 Subject: [PATCH 111/119] Cluster 
state role mapper file settings service (#108555) This PR simplifies the ReservedRoleMappingAction implementation, which is part of the FileSettingsService infrastructure, such that it stores the role mappings it parses from the settings.json file into the cluster state custom metadata that's used by the new ClusterStateRoleMapper. The native role mappings (stored in the .security index) are left untouched by the ReservedRoleMappingAction. --- docs/changelog/107886.yaml | 5 + .../rolemapping/PutRoleMappingRequest.java | 12 - .../PutRoleMappingRequestBuilder.java | 7 +- .../RoleMappingFileSettingsIT.java | 292 +++++++++++------- .../FileSettingsRoleMappingsStartupIT.java | 148 --------- .../xpack/security/Security.java | 3 +- .../ReservedRoleMappingAction.java | 136 ++------ .../TransportDeleteRoleMappingAction.java | 32 +- .../TransportPutRoleMappingAction.java | 28 +- .../rolemapping/RestPutRoleMappingAction.java | 21 +- ...dUnstableSecurityStateHandlerProvider.java | 28 -- .../security/UnstableLocalStateSecurity.java | 97 ------ .../ReservedRoleMappingActionTests.java | 152 +-------- ...TransportDeleteRoleMappingActionTests.java | 45 --- .../TransportPutRoleMappingActionTests.java | 41 +-- ...dstate.ReservedClusterStateHandlerProvider | 1 - .../ldap/AbstractAdLdapRealmTestCase.java | 18 +- 17 files changed, 268 insertions(+), 798 deletions(-) create mode 100644 docs/changelog/107886.yaml delete mode 100644 x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java delete mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java delete mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java delete mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java diff --git 
a/docs/changelog/107886.yaml b/docs/changelog/107886.yaml new file mode 100644 index 000000000000..a328bc2a2a20 --- /dev/null +++ b/docs/changelog/107886.yaml @@ -0,0 +1,5 @@ +pr: 107886 +summary: Cluster state role mapper file settings service +area: Authorization +type: enhancement +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java index 039ed8aa5fb6..f85ca260c3ff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequest.java @@ -166,16 +166,4 @@ public void writeTo(StreamOutput out) throws IOException { public ExpressionRoleMapping getMapping() { return new ExpressionRoleMapping(name, rules, roles, roleTemplates, metadata, enabled); } - - public static PutRoleMappingRequest fromMapping(ExpressionRoleMapping mapping) { - var request = new PutRoleMappingRequest(); - request.setName(mapping.getName()); - request.setEnabled(mapping.isEnabled()); - request.setRoles(mapping.getRoles()); - request.setRoleTemplates(mapping.getRoleTemplates()); - request.setRules(mapping.getExpression()); - request.setMetadata(mapping.getMetadata()); - - return request; - } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java index 88a930063190..d46c21f08030 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingRequestBuilder.java @@ -9,8 +9,7 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.core.security.authc.support.mapper.TemplateRoleName; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.RoleMapperExpression; @@ -35,8 +34,8 @@ public PutRoleMappingRequestBuilder(ElasticsearchClient client) { /** * Populate the put role request from the source and the role's name */ - public PutRoleMappingRequestBuilder source(String name, BytesReference source, XContentType xContentType) throws IOException { - ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, source, xContentType); + public PutRoleMappingRequestBuilder source(String name, XContentParser parser) throws IOException { + ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, parser); request.setName(name); request.setEnabled(mapping.isEnabled()); request.setRoles(mapping.getRoles()); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java index 7c753692628c..286a9cb736b1 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/RoleMappingFileSettingsIT.java @@ -7,11 +7,13 @@ package org.elasticsearch.integration; +import 
org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; -import org.elasticsearch.action.admin.indices.close.CloseIndexResponse; +import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; @@ -25,10 +27,15 @@ import org.elasticsearch.reservedstate.service.FileSettingsService; import org.elasticsearch.test.NativeRealmIntegTestCase; import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; +import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsAction; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; +import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; +import org.elasticsearch.xpack.core.security.authc.RealmConfig; +import org.elasticsearch.xpack.core.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; import org.junit.After; @@ -39,25 +46,31 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import 
java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; -import java.util.stream.Collectors; +import java.util.function.Consumer; import static org.elasticsearch.indices.recovery.RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING; import static org.elasticsearch.xcontent.XContentType.JSON; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.INTERNAL_SECURITY_MAIN_INDEX_7; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.Mockito.mock; /** - * Tests that file settings service can properly add role mappings and detect REST clashes - * with the reserved role mappings. + * Tests that file settings service can properly add role mappings. 
*/ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase { @@ -135,12 +148,21 @@ public class RoleMappingFileSettingsIT extends NativeRealmIntegTestCase { } }"""; + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + Settings.Builder builder = Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + // some tests make use of cluster-state based role mappings + .put("xpack.security.authc.cluster_state_role_mappings.enabled", true); + return builder.build(); + } + @After public void cleanUp() { updateClusterSettings(Settings.builder().putNull("indices.recovery.max_bytes_per_sec")); } - private void writeJSONFile(String node, String json) throws Exception { + public static void writeJSONFile(String node, String json, Logger logger, AtomicLong versionCounter) throws Exception { long version = versionCounter.incrementAndGet(); FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); @@ -151,10 +173,11 @@ private void writeJSONFile(String node, String json) throws Exception { Files.createDirectories(fileSettingsService.watchedFileDir()); Path tempFilePath = createTempFile(); - logger.info("--> writing JSON config to node {} with path {}", node, tempFilePath); + logger.info("--> before writing JSON config to node {} with path {}", node, tempFilePath); logger.info(Strings.format(json, version)); Files.write(tempFilePath, Strings.format(json, version).getBytes(StandardCharsets.UTF_8)); Files.move(tempFilePath, fileSettingsService.watchedFile(), StandardCopyOption.ATOMIC_MOVE); + logger.info("--> after writing JSON config to node {} with path {}", node, tempFilePath); } private Tuple setupClusterStateListener(String node, String expectedKey) { @@ -238,49 +261,41 @@ private void assertRoleMappingsSaveOK(CountDownLatch savedClusterState, AtomicLo expectThrows(ExecutionException.class, () -> clusterAdmin().updateSettings(req).get()).getMessage() ); + for 
(UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), containsInAnyOrder("kibana_user", "fleet_user")); + } + + // the role mappings are not retrievable by the role mapping action (which only accesses "native" i.e. index-based role mappings) var request = new GetRoleMappingsRequest(); request.setNames("everyone_kibana", "everyone_fleet"); var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertTrue(response.hasMappings()); - assertThat( - Arrays.stream(response.mappings()).map(r -> r.getName()).collect(Collectors.toSet()), - allOf(notNullValue(), containsInAnyOrder("everyone_kibana", "everyone_fleet")) - ); + assertFalse(response.hasMappings()); + assertThat(response.mappings(), emptyArray()); - // Try using the REST API to update the everyone_kibana role mapping - // This should fail, we have reserved certain role mappings in operator mode - assertEquals( - "Failed to process request " - + "[org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest/unset] " - + "with errors: [[everyone_kibana] set as read-only by [file_settings]]", - expectThrows( - IllegalArgumentException.class, - () -> client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet() - ).getMessage() - ); - assertEquals( - "Failed to process request " - + "[org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest/unset] " - + "with errors: [[everyone_fleet] set as read-only by [file_settings]]", - expectThrows( - IllegalArgumentException.class, - () -> client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet() - ).getMessage() - ); + // role mappings (with the 
same names) can also be stored in the "native" store + var putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana")).actionGet(); + assertTrue(putRoleMappingResponse.isCreated()); + putRoleMappingResponse = client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet")).actionGet(); + assertTrue(putRoleMappingResponse.isCreated()); } public void testRoleMappingsApplied() throws Exception { ensureGreen(); var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); - writeJSONFile(internalCluster().getMasterName(), testJSON); + writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); assertRoleMappingsSaveOK(savedClusterState.v1(), savedClusterState.v2()); logger.info("---> cleanup cluster settings..."); savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON); + writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); @@ -292,32 +307,65 @@ public void testRoleMappingsApplied() throws Exception { clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) ); - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); + // native role mappings are not affected by the removal of the cluster-state based ones + { + var request = new GetRoleMappingsRequest(); + request.setNames("everyone_kibana", "everyone_fleet"); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertTrue(response.hasMappings()); + assertThat( + 
Arrays.stream(response.mappings()).map(ExpressionRoleMapping::getName).toList(), + containsInAnyOrder("everyone_kibana", "everyone_fleet") + ); + } + + // and roles are resolved based on the native role mappings + for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), contains("kibana_user_native")); + } + + { + var request = new DeleteRoleMappingRequest(); + request.setName("everyone_kibana"); + var response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); + assertTrue(response.isFound()); + request = new DeleteRoleMappingRequest(); + request.setName("everyone_fleet"); + response = client().execute(DeleteRoleMappingAction.INSTANCE, request).get(); + assertTrue(response.isFound()); + } + + // no roles are resolved now, because both native and cluster-state based stores have been cleared + for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), empty()); + } } - private Tuple setupClusterStateListenerForError(String node) { - ClusterService clusterService = internalCluster().clusterService(node); + public static Tuple setupClusterStateListenerForError( + ClusterService clusterService, + Consumer errorMetadataConsumer + ) { CountDownLatch savedClusterState = new CountDownLatch(1); AtomicLong metadataVersion = new AtomicLong(-1); clusterService.addListener(new ClusterStateListener() { @Override public void clusterChanged(ClusterChangedEvent event) { 
ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); - if (reservedState != null - && reservedState.errorMetadata() != null - && reservedState.errorMetadata().errorKind() == ReservedStateErrorMetadata.ErrorKind.PARSING) { + if (reservedState != null && reservedState.errorMetadata() != null) { clusterService.removeListener(this); metadataVersion.set(event.state().metadata().version()); savedClusterState.countDown(); - assertEquals(ReservedStateErrorMetadata.ErrorKind.PARSING, reservedState.errorMetadata().errorKind()); - assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); - assertThat( - reservedState.errorMetadata().errors().get(0), - containsString("failed to parse role-mapping [everyone_kibana_bad]. missing field [rules]") - ); + errorMetadataConsumer.accept(reservedState.errorMetadata()); } } }); @@ -325,22 +373,13 @@ public void clusterChanged(ClusterChangedEvent event) { return new Tuple<>(savedClusterState, metadataVersion); } - private void assertRoleMappingsNotSaved(CountDownLatch savedClusterState, AtomicLong metadataVersion) throws Exception { - boolean awaitSuccessful = savedClusterState.await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); - - // This should succeed, nothing was reserved - client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_kibana_bad")).get(); - client().execute(PutRoleMappingAction.INSTANCE, sampleRestRequest("everyone_fleet_ok")).get(); - } - public void testErrorSaved() throws Exception { ensureGreen(); // save an empty file to clear any prior state, this ensures we don't get a stale file left over by another test var savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); - writeJSONFile(internalCluster().getMasterName(), emptyJSON); + writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); boolean awaitSuccessful = 
savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); @@ -353,76 +392,94 @@ public void testErrorSaved() throws Exception { ); // save a bad file - savedClusterState = setupClusterStateListenerForError(internalCluster().getMasterName()); - - writeJSONFile(internalCluster().getMasterName(), testErrorJSON); - assertRoleMappingsNotSaved(savedClusterState.v1(), savedClusterState.v2()); - } - - private Tuple setupClusterStateListenerForSecurityWriteError(String node) { - ClusterService clusterService = internalCluster().clusterService(node); - CountDownLatch savedClusterState = new CountDownLatch(1); - AtomicLong metadataVersion = new AtomicLong(-1); - clusterService.addListener(new ClusterStateListener() { - @Override - public void clusterChanged(ClusterChangedEvent event) { - ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); - if (reservedState != null - && reservedState.errorMetadata() != null - && reservedState.errorMetadata().errorKind() == ReservedStateErrorMetadata.ErrorKind.VALIDATION) { - clusterService.removeListener(this); - metadataVersion.set(event.state().metadata().version()); - savedClusterState.countDown(); - assertEquals(ReservedStateErrorMetadata.ErrorKind.VALIDATION, reservedState.errorMetadata().errorKind()); - assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); - assertThat(reservedState.errorMetadata().errors().get(0), containsString("closed")); - } + savedClusterState = setupClusterStateListenerForError( + internalCluster().getCurrentMasterNodeInstance(ClusterService.class), + errorMetadata -> { + assertEquals(ReservedStateErrorMetadata.ErrorKind.PARSING, errorMetadata.errorKind()); + assertThat(errorMetadata.errors(), allOf(notNullValue(), hasSize(1))); + assertThat( + errorMetadata.errors().get(0), + containsString("failed to parse role-mapping [everyone_kibana_bad]. 
missing field [rules]") + ); } - }); - - return new Tuple<>(savedClusterState, metadataVersion); - } - - public void testRoleMappingFailsToWriteToStore() throws Exception { - ensureGreen(); - - var savedClusterState = setupClusterStateListenerForSecurityWriteError(internalCluster().getMasterName()); - - final CloseIndexResponse closeIndexResponse = indicesAdmin().close(new CloseIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); - assertTrue(closeIndexResponse.isAcknowledged()); + ); - writeJSONFile(internalCluster().getMasterName(), testJSON); - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + writeJSONFile(internalCluster().getMasterName(), testErrorJSON, logger, versionCounter); + awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); assertTrue(awaitSuccessful); - var request = new GetRoleMappingsRequest(); - request.setNames("everyone_kibana", "everyone_fleet"); - - var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); - assertFalse(response.hasMappings()); - - final ClusterStateResponse clusterStateResponse = clusterAdmin().state( - new ClusterStateRequest().waitForMetadataVersion(savedClusterState.v2().get()) - ).get(); + // no roles are resolved because both role mapping stores are empty + for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), empty()); + } + } - assertNull( - clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()) - ); + public void testRoleMappingApplyWithSecurityIndexClosed() throws Exception { + ensureGreen(); - ReservedStateMetadata reservedState = clusterStateResponse.getState() - .metadata() - 
.reservedStateMetadata() - .get(FileSettingsService.NAMESPACE); + // expect the role mappings to apply even if the .security index is closed + var savedClusterState = setupClusterStateListener(internalCluster().getMasterName(), "everyone_kibana"); - ReservedStateHandlerMetadata handlerMetadata = reservedState.handlers().get(ReservedRoleMappingAction.NAME); - assertTrue(handlerMetadata == null || handlerMetadata.keys().isEmpty()); + try { + var closeIndexResponse = indicesAdmin().close(new CloseIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); + assertTrue(closeIndexResponse.isAcknowledged()); + + writeJSONFile(internalCluster().getMasterName(), testJSON, logger, versionCounter); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + // no native role mappings exist + var request = new GetRoleMappingsRequest(); + request.setNames("everyone_kibana", "everyone_fleet"); + var response = client().execute(GetRoleMappingsAction.INSTANCE, request).get(); + assertFalse(response.hasMappings()); + + // cluster state settings are also applied + var clusterStateResponse = clusterAdmin().state(new ClusterStateRequest().waitForMetadataVersion(savedClusterState.v2().get())) + .get(); + assertThat( + clusterStateResponse.getState().metadata().persistentSettings().get(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()), + equalTo("50mb") + ); + + ReservedStateMetadata reservedState = clusterStateResponse.getState() + .metadata() + .reservedStateMetadata() + .get(FileSettingsService.NAMESPACE); + + ReservedStateHandlerMetadata handlerMetadata = reservedState.handlers().get(ReservedRoleMappingAction.NAME); + assertThat(handlerMetadata.keys(), containsInAnyOrder("everyone_kibana", "everyone_fleet")); + + // and roles are resolved based on the cluster-state role mappings + for (UserRoleMapper userRoleMapper : internalCluster().getInstances(UserRoleMapper.class)) { + PlainActionFuture> resolveRolesFuture = new 
PlainActionFuture<>(); + userRoleMapper.resolveRoles( + new UserRoleMapper.UserData("anyUsername", null, List.of(), Map.of(), mock(RealmConfig.class)), + resolveRolesFuture + ); + assertThat(resolveRolesFuture.get(), containsInAnyOrder("kibana_user", "fleet_user")); + } + } finally { + savedClusterState = setupClusterStateListenerForCleanup(internalCluster().getMasterName()); + writeJSONFile(internalCluster().getMasterName(), emptyJSON, logger, versionCounter); + boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); + assertTrue(awaitSuccessful); + + var openIndexResponse = indicesAdmin().open(new OpenIndexRequest(INTERNAL_SECURITY_MAIN_INDEX_7)).get(); + assertTrue(openIndexResponse.isAcknowledged()); + } } private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { var json = """ { - "enabled": false, - "roles": [ "kibana_user" ], + "enabled": true, + "roles": [ "kibana_user_native" ], "rules": { "field": { "username": "*" } }, "metadata": { "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7" @@ -433,8 +490,7 @@ private PutRoleMappingRequest sampleRestRequest(String name) throws Exception { var bis = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); var parser = JSON.xContent().createParser(XContentParserConfiguration.EMPTY, bis) ) { - ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, parser); - return PutRoleMappingRequest.fromMapping(mapping); + return new PutRoleMappingRequestBuilder(null).source(name, parser).request(); } } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java deleted file mode 100644 index 48e97b7afb89..000000000000 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsStartupIT.java +++ /dev/null @@ 
-1,148 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.security; - -import org.elasticsearch.analysis.common.CommonAnalysisPlugin; -import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.metadata.ReservedStateErrorMetadata; -import org.elasticsearch.cluster.metadata.ReservedStateMetadata; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.core.Strings; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.index.mapper.extras.MapperExtrasPlugin; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.reindex.ReindexPlugin; -import org.elasticsearch.reservedstate.service.FileSettingsService; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.transport.netty4.Netty4Plugin; -import org.elasticsearch.xpack.wildcard.Wildcard; - -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardCopyOption; -import java.util.Arrays; -import java.util.Collection; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicLong; - -import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.notNullValue; - -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) -public class 
FileSettingsRoleMappingsStartupIT extends SecurityIntegTestCase { - - private static AtomicLong versionCounter = new AtomicLong(1); - private static String testJSONForFailedCase = """ - { - "metadata": { - "version": "%s", - "compatibility": "8.4.0" - }, - "state": { - "role_mappings": { - "everyone_kibana_2": { - "enabled": true, - "roles": [ "kibana_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", - "_foo": "something" - } - } - } - } - }"""; - - @Override - protected void doAssertXPackIsInstalled() {} - - @Override - protected Path nodeConfigPath(int nodeOrdinal) { - return null; - } - - private void writeJSONFile(String node, String json) throws Exception { - long version = versionCounter.incrementAndGet(); - - FileSettingsService fileSettingsService = internalCluster().getInstance(FileSettingsService.class, node); - - Files.deleteIfExists(fileSettingsService.watchedFile()); - - Files.createDirectories(fileSettingsService.watchedFileDir()); - Path tempFilePath = createTempFile(); - - logger.info("--> writing JSON config to node {} with path {}", node, tempFilePath); - logger.info(Strings.format(json, version)); - Files.write(tempFilePath, Strings.format(json, version).getBytes(StandardCharsets.UTF_8)); - Files.move(tempFilePath, fileSettingsService.watchedFile(), StandardCopyOption.ATOMIC_MOVE); - } - - private Tuple setupClusterStateListenerForError(String node) { - ClusterService clusterService = internalCluster().clusterService(node); - CountDownLatch savedClusterState = new CountDownLatch(1); - AtomicLong metadataVersion = new AtomicLong(-1); - clusterService.addListener(new ClusterStateListener() { - @Override - public void clusterChanged(ClusterChangedEvent event) { - ReservedStateMetadata reservedState = event.state().metadata().reservedStateMetadata().get(FileSettingsService.NAMESPACE); - if (reservedState != null && reservedState.errorMetadata() != null) { - 
assertEquals(ReservedStateErrorMetadata.ErrorKind.VALIDATION, reservedState.errorMetadata().errorKind()); - assertThat(reservedState.errorMetadata().errors(), allOf(notNullValue(), hasSize(1))); - assertThat(reservedState.errorMetadata().errors().get(0), containsString("Fake exception")); - clusterService.removeListener(this); - metadataVersion.set(event.state().metadata().version()); - savedClusterState.countDown(); - } else if (reservedState != null) { - logger.debug(() -> "Got reserved state update without error metadata: " + reservedState); - } else { - logger.debug(() -> "Got cluster state update: " + event.source()); - } - } - }); - - return new Tuple<>(savedClusterState, metadataVersion); - } - - @TestLogging( - value = "org.elasticsearch.common.file:DEBUG,org.elasticsearch.xpack.security:DEBUG,org.elasticsearch.cluster.metadata:DEBUG", - reason = "https://github.com/elastic/elasticsearch/issues/98391" - ) - public void testFailsOnStartMasterNodeWithError() throws Exception { - internalCluster().setBootstrapMasterNodeIndex(0); - - internalCluster().startMasterOnlyNode(); - - logger.info("--> write some role mappings, no other file settings"); - writeJSONFile(internalCluster().getMasterName(), testJSONForFailedCase); - var savedClusterState = setupClusterStateListenerForError(internalCluster().getMasterName()); - - boolean awaitSuccessful = savedClusterState.v1().await(20, TimeUnit.SECONDS); - assertTrue(awaitSuccessful); - } - - public Collection> nodePlugins() { - return Arrays.asList( - UnstableLocalStateSecurity.class, - Netty4Plugin.class, - ReindexPlugin.class, - CommonAnalysisPlugin.class, - InternalSettingsPlugin.class, - MapperExtrasPlugin.class, - Wildcard.class - ); - } - -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index ef08f855a46c..0ff4f1160af5 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -1103,8 +1103,7 @@ Collection createComponents( new SecurityUsageServices(realms, allRolesStore, nativeRoleMappingStore, ipFilter.get(), profileService, apiKeyService) ); - reservedRoleMappingAction.set(new ReservedRoleMappingAction(nativeRoleMappingStore)); - systemIndices.getMainIndexManager().onStateRecovered(state -> reservedRoleMappingAction.get().securityIndexRecovered()); + reservedRoleMappingAction.set(new ReservedRoleMappingAction()); cacheInvalidatorRegistry.validate(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java index 852887767578..73d1a1abcdb5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/ReservedRoleMappingAction.java @@ -7,24 +7,18 @@ package org.elasticsearch.xpack.security.action.rolemapping; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.GroupedActionListener; -import org.elasticsearch.common.util.concurrent.ListenableFuture; -import org.elasticsearch.reservedstate.NonStateTransformResult; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.reservedstate.TransformState; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; 
+import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; -import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import org.elasticsearch.xpack.core.security.authz.RoleMappingMetadata; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -38,123 +32,59 @@ * It is used by the ReservedClusterStateService to add/update or remove role mappings. Typical usage * for this action is in the context of file based settings. */ -public class ReservedRoleMappingAction implements ReservedClusterStateHandler> { +public class ReservedRoleMappingAction implements ReservedClusterStateHandler> { public static final String NAME = "role_mappings"; - private final NativeRoleMappingStore roleMappingStore; - private final ListenableFuture securityIndexRecoveryListener = new ListenableFuture<>(); - - /** - * Creates a ReservedRoleMappingAction - * - * @param roleMappingStore requires {@link NativeRoleMappingStore} for storing/deleting the mappings - */ - public ReservedRoleMappingAction(NativeRoleMappingStore roleMappingStore) { - this.roleMappingStore = roleMappingStore; - } - @Override public String name() { return NAME; } - private static Collection prepare(List roleMappings) { - List requests = roleMappings.stream().map(rm -> PutRoleMappingRequest.fromMapping(rm)).toList(); - - var exceptions = new ArrayList(); - for (var request : requests) { - // File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX - var exception = request.validate(false); - if (exception != null) { - exceptions.add(exception); - } - } - - if (exceptions.isEmpty() == false) { - var illegalArgumentException = new IllegalArgumentException("error on validating put role mapping 
requests"); - exceptions.forEach(illegalArgumentException::addSuppressed); - throw illegalArgumentException; - } - - return requests; - } - @Override public TransformState transform(Object source, TransformState prevState) throws Exception { - // We execute the prepare() call to catch any errors in the transform phase. - // Since we store the role mappings outside the cluster state, we do the actual save with a - // non cluster state transform call. @SuppressWarnings("unchecked") - var requests = prepare((List) source); - return new TransformState( - prevState.state(), - prevState.keys(), - l -> securityIndexRecoveryListener.addListener( - ActionListener.wrap(ignored -> nonStateTransform(requests, prevState, l), l::onFailure) - ) - ); - } - - // Exposed for testing purposes - protected void nonStateTransform( - Collection requests, - TransformState prevState, - ActionListener listener - ) { - Set entities = requests.stream().map(r -> r.getName()).collect(Collectors.toSet()); - Set toDelete = new HashSet<>(prevState.keys()); - toDelete.removeAll(entities); - - final int tasksCount = requests.size() + toDelete.size(); - - // Nothing to do, don't start a group listener with 0 actions - if (tasksCount == 0) { - listener.onResponse(new NonStateTransformResult(ReservedRoleMappingAction.NAME, Set.of())); - return; - } - - GroupedActionListener taskListener = new GroupedActionListener<>(tasksCount, new ActionListener<>() { - @Override - public void onResponse(Collection booleans) { - listener.onResponse(new NonStateTransformResult(ReservedRoleMappingAction.NAME, Collections.unmodifiableSet(entities))); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - }); - - for (var request : requests) { - roleMappingStore.putRoleMapping(request, taskListener); - } - - for (var mappingToDelete : toDelete) { - var deleteRequest = new DeleteRoleMappingRequest(); - deleteRequest.setName(mappingToDelete); - 
roleMappingStore.deleteRoleMapping(deleteRequest, taskListener); + Set roleMappings = validate((List) source); + RoleMappingMetadata newRoleMappingMetadata = new RoleMappingMetadata(roleMappings); + if (newRoleMappingMetadata.equals(RoleMappingMetadata.getFromClusterState(prevState.state()))) { + return prevState; + } else { + ClusterState newState = newRoleMappingMetadata.updateClusterState(prevState.state()); + Set entities = newRoleMappingMetadata.getRoleMappings() + .stream() + .map(ExpressionRoleMapping::getName) + .collect(Collectors.toSet()); + return new TransformState(newState, entities); } } @Override - public List fromXContent(XContentParser parser) throws IOException { - List result = new ArrayList<>(); - + public List fromXContent(XContentParser parser) throws IOException { + List result = new ArrayList<>(); Map source = parser.map(); - for (String name : source.keySet()) { @SuppressWarnings("unchecked") Map content = (Map) source.get(name); try (XContentParser mappingParser = mapToXContentParser(XContentParserConfiguration.EMPTY, content)) { - ExpressionRoleMapping mapping = ExpressionRoleMapping.parse(name, mappingParser); - result.add(mapping); + result.add(new PutRoleMappingRequestBuilder(null).source(name, mappingParser).request()); } } - return result; } - public void securityIndexRecovered() { - securityIndexRecoveryListener.onResponse(null); + private Set validate(List roleMappings) { + var exceptions = new ArrayList(); + for (var roleMapping : roleMappings) { + // File based defined role mappings are allowed to use MetadataUtils.RESERVED_PREFIX + var exception = roleMapping.validate(false); + if (exception != null) { + exceptions.add(exception); + } + } + if (exceptions.isEmpty() == false) { + var illegalArgumentException = new IllegalArgumentException("error on validating put role mapping requests"); + exceptions.forEach(illegalArgumentException::addSuppressed); + throw illegalArgumentException; + } + return 
roleMappings.stream().map(PutRoleMappingRequest::getMapping).collect(Collectors.toUnmodifiableSet()); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java index 811d357b89f8..b4e8d5d6db83 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java @@ -8,9 +8,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.ReservedStateAwareHandledTransportAction; -import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; @@ -18,12 +18,7 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingResponse; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -import java.util.Optional; -import java.util.Set; - -public class TransportDeleteRoleMappingAction extends ReservedStateAwareHandledTransportAction< - DeleteRoleMappingRequest, - DeleteRoleMappingResponse> { +public class TransportDeleteRoleMappingAction extends HandledTransportAction { private final NativeRoleMappingStore roleMappingStore; @@ -31,25 +26,20 @@ public class TransportDeleteRoleMappingAction extends ReservedStateAwareHandledT public TransportDeleteRoleMappingAction( ActionFilters 
actionFilters, TransportService transportService, - ClusterService clusterService, NativeRoleMappingStore roleMappingStore ) { - super(DeleteRoleMappingAction.NAME, clusterService, transportService, actionFilters, DeleteRoleMappingRequest::new); + super( + DeleteRoleMappingAction.NAME, + transportService, + actionFilters, + DeleteRoleMappingRequest::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); this.roleMappingStore = roleMappingStore; } @Override - protected void doExecuteProtected(Task task, DeleteRoleMappingRequest request, ActionListener listener) { + protected void doExecute(Task task, DeleteRoleMappingRequest request, ActionListener listener) { roleMappingStore.deleteRoleMapping(request, listener.safeMap(DeleteRoleMappingResponse::new)); } - - @Override - public Optional reservedStateHandlerName() { - return Optional.of(ReservedRoleMappingAction.NAME); - } - - @Override - public Set modifiedKeys(DeleteRoleMappingRequest request) { - return Set.of(request.getName()); - } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java index 5e32e4f903f8..44c72bc13a54 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java @@ -8,9 +8,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.ReservedStateAwareHandledTransportAction; -import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; import 
org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; @@ -18,10 +18,7 @@ import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -import java.util.Optional; -import java.util.Set; - -public class TransportPutRoleMappingAction extends ReservedStateAwareHandledTransportAction { +public class TransportPutRoleMappingAction extends HandledTransportAction { private final NativeRoleMappingStore roleMappingStore; @@ -29,32 +26,17 @@ public class TransportPutRoleMappingAction extends ReservedStateAwareHandledTran public TransportPutRoleMappingAction( ActionFilters actionFilters, TransportService transportService, - ClusterService clusterService, NativeRoleMappingStore roleMappingStore ) { - super(PutRoleMappingAction.NAME, clusterService, transportService, actionFilters, PutRoleMappingRequest::new); + super(PutRoleMappingAction.NAME, transportService, actionFilters, PutRoleMappingRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.roleMappingStore = roleMappingStore; } @Override - protected void doExecuteProtected( - Task task, - final PutRoleMappingRequest request, - final ActionListener listener - ) { + protected void doExecute(Task task, final PutRoleMappingRequest request, final ActionListener listener) { roleMappingStore.putRoleMapping( request, ActionListener.wrap(created -> listener.onResponse(new PutRoleMappingResponse(created)), listener::onFailure) ); } - - @Override - public Optional reservedStateHandlerName() { - return Optional.of(ReservedRoleMappingAction.NAME); - } - - @Override - public Set modifiedKeys(PutRoleMappingRequest request) { - return Set.of(request.getName()); - } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java index e7e24037543f..55562c8ee013 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/rolemapping/RestPutRoleMappingAction.java @@ -8,6 +8,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestRequest; @@ -17,6 +19,7 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestBuilderListener; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; @@ -57,12 +60,18 @@ public String getName() { @Override public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { - final String name = request.param("name"); - PutRoleMappingRequestBuilder requestBuilder = new PutRoleMappingRequestBuilder(client).source( - name, - request.requiredContent(), - request.getXContentType() - ).setRefreshPolicy(request.param("refresh")); + String name = request.param("name"); + String refresh = request.param("refresh"); + PutRoleMappingRequestBuilder requestBuilder; + try ( + XContentParser parser = XContentHelper.createParserNotCompressed( + LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, + request.requiredContent(), + request.getXContentType() + ) + ) { + requestBuilder = new 
PutRoleMappingRequestBuilder(client).source(name, parser).setRefreshPolicy(refresh); + } return channel -> requestBuilder.execute(new RestBuilderListener<>(channel) { @Override public RestResponse buildResponse(PutRoleMappingResponse response, XContentBuilder builder) throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java deleted file mode 100644 index b4a07093e49c..000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalReservedUnstableSecurityStateHandlerProvider.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.security; - -import org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider; - -/** - * Mock Security Provider implementation for the {@link ReservedClusterStateHandlerProvider} service interface. This is used - * for {@link org.elasticsearch.test.ESIntegTestCase} because the Security Plugin is really LocalStateSecurity in those tests. - *

- * Unlike {@link LocalReservedSecurityStateHandlerProvider} this implementation is mocked to implement the - * {@link UnstableLocalStateSecurity}. Separate implementation is needed, because the SPI creation code matches the constructor - * signature when instantiating. E.g. we need to match {@link UnstableLocalStateSecurity} instead of {@link LocalStateSecurity} - */ -public class LocalReservedUnstableSecurityStateHandlerProvider extends LocalReservedSecurityStateHandlerProvider { - public LocalReservedUnstableSecurityStateHandlerProvider() { - throw new IllegalStateException("Provider must be constructed using PluginsService"); - } - - public LocalReservedUnstableSecurityStateHandlerProvider(UnstableLocalStateSecurity plugin) { - super(plugin); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java deleted file mode 100644 index 5621bdced15b..000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/UnstableLocalStateSecurity.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.security; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.reservedstate.NonStateTransformResult; -import org.elasticsearch.reservedstate.ReservedClusterStateHandler; -import org.elasticsearch.reservedstate.TransformState; -import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; -import org.elasticsearch.xpack.core.ssl.SSLService; -import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; - -import java.nio.file.Path; -import java.util.Collection; -import java.util.List; -import java.util.Optional; - -/** - * A test class that allows us to Inject new type of Reserved Handler that can - * simulate errors in saving role mappings. - *

- * We can't use our regular path to simply make an extension of LocalStateSecurity - * in an integration test class, because the reserved handlers are injected through - * SPI. (see {@link LocalReservedUnstableSecurityStateHandlerProvider}) - */ -public final class UnstableLocalStateSecurity extends LocalStateSecurity { - - public UnstableLocalStateSecurity(Settings settings, Path configPath) throws Exception { - super(settings, configPath); - // We reuse most of the initialization of LocalStateSecurity, we then just overwrite - // the security plugin with an extra method to give us a fake RoleMappingAction. - Optional security = plugins.stream().filter(p -> p instanceof Security).findFirst(); - if (security.isPresent()) { - plugins.remove(security.get()); - } - - UnstableLocalStateSecurity thisVar = this; - var action = new ReservedUnstableRoleMappingAction(); - - plugins.add(new Security(settings, super.securityExtensions()) { - @Override - protected SSLService getSslService() { - return thisVar.getSslService(); - } - - @Override - protected XPackLicenseState getLicenseState() { - return thisVar.getLicenseState(); - } - - @Override - List> reservedClusterStateHandlers() { - // pretend the security index is initialized after 2 seconds - var timer = new java.util.Timer(); - timer.schedule(new java.util.TimerTask() { - @Override - public void run() { - action.securityIndexRecovered(); - timer.cancel(); - } - }, 2_000); - return List.of(action); - } - }); - } - - public static class ReservedUnstableRoleMappingAction extends ReservedRoleMappingAction { - /** - * Creates a fake ReservedRoleMappingAction that doesn't actually use the role mapping store - */ - public ReservedUnstableRoleMappingAction() { - // we don't actually need a NativeRoleMappingStore - super(null); - } - - /** - * The nonStateTransform method is the only one that uses the native store, we simply pretend - * something has called the onFailure method of the listener. 
- */ - @Override - protected void nonStateTransform( - Collection requests, - TransformState prevState, - ActionListener listener - ) { - listener.onFailure(new IllegalStateException("Fake exception")); - } - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java index 6cdca0cb3b24..cac7c91f73ed 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/reservedstate/ReservedRoleMappingActionTests.java @@ -7,77 +7,40 @@ package org.elasticsearch.xpack.security.action.reservedstate; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.reservedstate.NonStateTransformResult; import org.elasticsearch.reservedstate.TransformState; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.security.action.rolemapping.ReservedRoleMappingAction; -import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; -import org.elasticsearch.xpack.security.support.SecurityIndexManager; import java.util.Collections; -import java.util.Set; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.containsInAnyOrder; import static 
org.hamcrest.Matchers.empty; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; +import static org.hamcrest.Matchers.nullValue; /** * Tests that the ReservedRoleMappingAction does validation, can add and remove role mappings */ public class ReservedRoleMappingActionTests extends ESTestCase { + private TransformState processJSON(ReservedRoleMappingAction action, TransformState prevState, String json) throws Exception { try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) { var content = action.fromXContent(parser); var state = action.transform(content, prevState); - - CountDownLatch latch = new CountDownLatch(1); - AtomicReference> updatedKeys = new AtomicReference<>(); - AtomicReference error = new AtomicReference<>(); - state.nonStateTransform().accept(new ActionListener<>() { - @Override - public void onResponse(NonStateTransformResult nonStateTransformResult) { - updatedKeys.set(nonStateTransformResult.updatedKeys()); - latch.countDown(); - } - - @Override - public void onFailure(Exception e) { - error.set(e); - latch.countDown(); - } - }); - - latch.await(); - if (error.get() != null) { - throw error.get(); - } - return new TransformState(state.state(), updatedKeys.get()); + assertThat(state.nonStateTransform(), nullValue()); + return state; } } public void testValidation() { - var nativeRoleMappingStore = mockNativeRoleMappingStore(); - ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); TransformState prevState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); - action.securityIndexRecovered(); - + ReservedRoleMappingAction action = new ReservedRoleMappingAction(); String badPolicyJSON = """ { "everyone_kibana": { @@ -97,7 +60,6 @@ public void 
testValidation() { } } }"""; - assertEquals( "failed to parse role-mapping [everyone_fleet]. missing field [rules]", expectThrows(ParsingException.class, () -> processJSON(action, prevState, badPolicyJSON)).getMessage() @@ -105,13 +67,9 @@ public void testValidation() { } public void testAddRemoveRoleMapping() throws Exception { - var nativeRoleMappingStore = mockNativeRoleMappingStore(); - ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); TransformState prevState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); - action.securityIndexRecovered(); - + ReservedRoleMappingAction action = new ReservedRoleMappingAction(); String emptyJSON = ""; TransformState updatedState = processJSON(action, prevState, emptyJSON); @@ -147,102 +105,4 @@ public void testAddRemoveRoleMapping() throws Exception { updatedState = processJSON(action, prevState, emptyJSON); assertThat(updatedState.keys(), empty()); } - - @SuppressWarnings("unchecked") - public void testNonStateTransformWaitsOnAsyncActions() throws Exception { - var nativeRoleMappingStore = mockNativeRoleMappingStore(); - - doAnswer(invocation -> { - new Thread(() -> { - // Simulate put role mapping async action taking a while - try { - Thread.sleep(1_000); - ((ActionListener) invocation.getArgument(1)).onFailure(new IllegalStateException("err_done")); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }).start(); - - return null; - }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); - - doAnswer(invocation -> { - new Thread(() -> { - // Simulate delete role mapping async action taking a while - try { - Thread.sleep(1_000); - ((ActionListener) invocation.getArgument(1)).onFailure(new IllegalStateException("err_done")); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - }).start(); - - return null; - 
}).when(nativeRoleMappingStore).deleteRoleMapping(any(), any()); - - ClusterState state = ClusterState.builder(new ClusterName("elasticsearch")).build(); - TransformState updatedState = new TransformState(state, Collections.emptySet()); - ReservedRoleMappingAction action = new ReservedRoleMappingAction(nativeRoleMappingStore); - action.securityIndexRecovered(); - - String json = """ - { - "everyone_kibana": { - "enabled": true, - "roles": [ "kibana_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", - "_reserved": true - } - }, - "everyone_fleet": { - "enabled": true, - "roles": [ "fleet_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "a9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", - "_reserved": true - } - } - }"""; - - assertEquals( - "err_done", - expectThrows(IllegalStateException.class, () -> processJSON(action, new TransformState(state, Collections.emptySet()), json)) - .getMessage() - ); - - // Now that we've tested that we wait on putRoleMapping correctly, let it finish without exception, so we can test error on delete - doAnswer(invocation -> { - ((ActionListener) invocation.getArgument(1)).onResponse(true); - return null; - }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); - - updatedState = processJSON(action, updatedState, json); - assertThat(updatedState.keys(), containsInAnyOrder("everyone_kibana", "everyone_fleet")); - - final TransformState currentState = new TransformState(updatedState.state(), updatedState.keys()); - - assertEquals("err_done", expectThrows(IllegalStateException.class, () -> processJSON(action, currentState, "")).getMessage()); - } - - @SuppressWarnings("unchecked") - private NativeRoleMappingStore mockNativeRoleMappingStore() { - final NativeRoleMappingStore nativeRoleMappingStore = spy( - new NativeRoleMappingStore(Settings.EMPTY, mock(Client.class), mock(SecurityIndexManager.class), mock(ScriptService.class)) - ); - - 
doAnswer(invocation -> { - ((ActionListener) invocation.getArgument(1)).onResponse(true); - return null; - }).when(nativeRoleMappingStore).putRoleMapping(any(), any()); - - doAnswer(invocation -> { - ((ActionListener) invocation.getArgument(1)).onResponse(true); - return null; - }).when(nativeRoleMappingStore).deleteRoleMapping(any(), any()); - - return nativeRoleMappingStore; - } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java deleted file mode 100644 index 038e673e0786..000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingActionTests.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.security.action.rolemapping; - -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; -import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; - -import java.util.Collections; - -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.mockito.Mockito.mock; - -public class TransportDeleteRoleMappingActionTests extends ESTestCase { - public void testReservedStateHandler() { - var store = mock(NativeRoleMappingStore.class); - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - mock(ThreadPool.class), - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - x -> null, - null, - Collections.emptySet() - ); - var action = new TransportDeleteRoleMappingAction(mock(ActionFilters.class), transportService, mock(ClusterService.class), store); - - assertEquals(ReservedRoleMappingAction.NAME, action.reservedStateHandlerName().get()); - - var deleteRequest = new DeleteRoleMappingRequest(); - deleteRequest.setName("kibana_all"); - assertThat(action.modifiedKeys(deleteRequest), containsInAnyOrder("kibana_all")); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index 58a8e8e3d475..6f789a10a3a6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -9,16 +9,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; @@ -33,7 +29,6 @@ import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.iterableWithSize; @@ -60,7 +55,7 @@ public void setupMocks() { null, Collections.emptySet() ); - action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, mock(ClusterService.class), store); + action = new TransportPutRoleMappingAction(mock(ActionFilters.class), transportService, store); requestRef = new AtomicReference<>(null); @@ -99,39 +94,7 @@ private PutRoleMappingResponse put(String name, FieldExpression expression, Stri request.setMetadata(metadata); request.setEnabled(true); final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecuteProtected(mock(Task.class), request, 
future); + action.doExecute(mock(Task.class), request, future); return future.get(); } - - public void testReservedStateHandler() throws Exception { - assertEquals(ReservedRoleMappingAction.NAME, action.reservedStateHandlerName().get()); - String json = """ - { - "everyone_kibana": { - "enabled": true, - "roles": [ "kibana_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7" - } - }, - "everyone_fleet": { - "enabled": true, - "roles": [ "fleet_user" ], - "rules": { "field": { "username": "*" } }, - "metadata": { - "uuid" : "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7" - } - } - }"""; - - try (XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, json)) { - ReservedRoleMappingAction roleMappingAction = new ReservedRoleMappingAction(store); - var parsedResult = roleMappingAction.fromXContent(parser); - - for (var mapping : parsedResult) { - assertThat(action.modifiedKeys(PutRoleMappingRequest.fromMapping(mapping)), containsInAnyOrder(mapping.getName())); - } - } - } } diff --git a/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider b/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider index 77c38d302d9c..3d17572429ba 100644 --- a/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider +++ b/x-pack/plugin/security/src/test/resources/META-INF/services/org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider @@ -6,4 +6,3 @@ # org.elasticsearch.xpack.security.LocalReservedSecurityStateHandlerProvider -org.elasticsearch.xpack.security.LocalReservedUnstableSecurityStateHandlerProvider diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java 
b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java index 3d9e7f3828bc..17363d58545c 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java @@ -20,11 +20,14 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.ssl.SslVerificationMode; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.fixtures.smb.SmbTestContainer; import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; +import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; @@ -187,11 +190,16 @@ public void setupRoleMappings() throws Exception { Map> futures = Maps.newLinkedHashMapWithExpectedSize(content.size()); for (int i = 0; i < content.size(); i++) { final String name = "external_" + i; - final PutRoleMappingRequestBuilder builder = new PutRoleMappingRequestBuilder(client()).source( - name, - new BytesArray(content.get(i)), - XContentType.JSON - ); + final PutRoleMappingRequestBuilder builder; + try ( + XContentParser parser = XContentHelper.createParserNotCompressed( + LoggingDeprecationHandler.XCONTENT_PARSER_CONFIG, + new BytesArray(content.get(i)), + XContentType.JSON + ) + ) { + builder = new PutRoleMappingRequestBuilder(client()).source(name, 
parser); + } futures.put(name, builder.execute()); } for (String mappingName : futures.keySet()) { From c0d419d3353b5f8762b0a5a819fcda4c74b02119 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Tue, 14 May 2024 08:37:17 +0200 Subject: [PATCH 112/119] Remove obsolete YAML test cases (#108357) --- .../indices.put_settings/all_path_options.yml | 18 ----------- .../test/update/85_fields_meta.yml | 30 ------------------- 2 files changed, 48 deletions(-) delete mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml index ae3eadded108..86f02641d86f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml @@ -78,24 +78,6 @@ setup: - match: {test_index2.settings.index.refresh_interval: 10s} - is_false: foo.settings.index.refresh_interval ---- -"put settings in list of indices": - - skip: - awaits_fix: list of indices not implemented yet - - do: - indices.put_settings: - index: test_index1, test_index2 - body: - refresh_interval: 10s - - - do: - indices.get_settings: {} - - - match: {test_index1.settings.index.refresh_interval: 10s} - - match: {test_index2.settings.index.refresh_interval: 10s} - - is_false: foo.settings.index.refresh_interval - - --- "put settings in blank index": - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml deleted file mode 100644 index 81be6f82d8a1..000000000000 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml +++ /dev/null @@ -1,30 +0,0 @@ ---- 
-"Metadata Fields": - - - skip: - awaits_fix: "Update doesn't return metadata fields, waiting for #3259" - - - do: - indices.create: - index: test_1 - - - do: - update: - index: test_1 - id: "1" - parent: 5 - fields: [ _routing ] - body: - doc: { foo: baz } - upsert: { foo: bar } - - - match: { get._routing: "5" } - - - do: - get: - index: test_1 - id: "1" - parent: 5 - stored_fields: [ _routing ] - - From bac60016218f6da5df9b460fc247ebad33de4f2f Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Tue, 14 May 2024 08:37:54 +0200 Subject: [PATCH 113/119] Remove test case awaiting fix without bug url (#108358) --- .../rest-api-spec/test/rollup/get_jobs.yml | 126 ------------------ 1 file changed, 126 deletions(-) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml index bd40e29d0b67..671fb2471563 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/rollup/get_jobs.yml @@ -98,129 +98,3 @@ setup: - match: jobs: [] - ---- -"Test get all jobs": - - - skip: - awaits_fix: "Job ordering isn't guaranteed right now, cannot test" - - - do: - headers: - Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser - rollup.put_job: - id: foo - body: > - { - "index_pattern": "foo", - "rollup_index": "foo_rollup", - "cron": "*/30 * * * * ?", - "page_size" :10, - "groups" : { - "date_histogram": { - "field": "the_field", - "calendar_interval": "1h" - } - }, - "metrics": [ - { - "field": "value_field", - "metrics": ["min", "max", "sum"] - } - ] - } - - is_true: acknowledged - - - do: - headers: - Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser - rollup.put_job: - id: bar - body: > - { - "index_pattern": "bar", - "rollup_index": "foo_rollup", - "cron": "*/30 * * * * ?", - "page_size" :10, - "groups" : { - "date_histogram": { - "field": "the_field", - "calendar_interval": "1h" - } - }, - "metrics": [ - { - "field": "value_field", - "metrics": ["min", "max", "sum"] - } - ] - } - - is_true: acknowledged - - - do: - rollup.get_jobs: - id: "_all" - - - length: { jobs: 2 } - - match: - jobs: - - config: - id: "foo" - index_pattern: "foo" - rollup_index: "foo_rollup" - cron: "*/30 * * * * ?" - page_size: 10 - groups : - date_histogram: - calendar_interval: "1h" - field: "the_field" - time_zone: "UTC" - metrics: - - field: "value_field" - metrics: - - "min" - - "max" - - "sum" - timeout: "20s" - stats: - pages_processed: 0 - documents_processed: 0 - rollups_indexed: 0 - trigger_count: 0 - status: - job_state: "stopped" - - config: - id: "bar" - index_pattern: "bar" - rollup_index: "foo_rollup" - cron: "*/30 * * * * ?" - page_size: 10 - groups : - date_histogram: - calendar_interval: "1h" - field: "the_field" - time_zone: "UTC" - metrics: - - field: "value_field" - metrics: - - "min" - - "max" - - "sum" - timeout: "20s" - stats: - pages_processed: 0 - documents_processed: 0 - rollups_indexed: 0 - trigger_count: 0 - search_failures: 0 - index_failures: 0 - index_time_in_ms: 0 - index_total: 0 - search_time_in_ms: 0 - search_total: 0 - processing_time_in_ms: 0 - processing_total: 0 - status: - job_state: "stopped" - - From 0b1d71e7c8ddbc2d93267ddb0e805c382861c2c2 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 14 May 2024 09:23:47 +0200 Subject: [PATCH 114/119] Simplify instantiating SyntheticFieldLoader instances (#108247) * Exclude SourceLoader.SyntheticFieldLoader.NOTHING loaders. These have nothing to add. Most meta field mappers return this instance. Only doc count field mapper returns a real synthetic loader. 
* Only concat streams in root object mapper and not for any other object mapper. --- .../org/elasticsearch/index/mapper/Mapping.java | 4 +++- .../elasticsearch/index/mapper/ObjectMapper.java | 16 +++++++--------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java index b5de3971fa09..acfe0fcfbf5b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapping.java @@ -21,6 +21,7 @@ import java.util.Comparator; import java.util.HashMap; import java.util.Map; +import java.util.stream.Stream; /** * Wrapper around everything that defines a mapping, without references to @@ -125,7 +126,8 @@ private boolean isSourceSynthetic() { } public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { - return root.syntheticFieldLoader(Arrays.stream(metadataMappers)); + var stream = Stream.concat(Stream.of(metadataMappers), root.mappers.values().stream()); + return root.syntheticFieldLoader(stream); } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 57572cd3617f..6336e6ca0b76 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -751,19 +751,17 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep } - public SourceLoader.SyntheticFieldLoader syntheticFieldLoader(Stream extra) { - return new SyntheticSourceFieldLoader( - Stream.concat(extra, mappers.values().stream()) - .sorted(Comparator.comparing(Mapper::name)) - .map(Mapper::syntheticFieldLoader) - .filter(l -> l != null) - .toList() - ); + public SourceLoader.SyntheticFieldLoader syntheticFieldLoader(Stream mappers) { + var fields = 
mappers.sorted(Comparator.comparing(Mapper::name)) + .map(Mapper::syntheticFieldLoader) + .filter(l -> l != SourceLoader.SyntheticFieldLoader.NOTHING) + .toList(); + return new SyntheticSourceFieldLoader(fields); } @Override public SourceLoader.SyntheticFieldLoader syntheticFieldLoader() { - return syntheticFieldLoader(Stream.empty()); + return syntheticFieldLoader(mappers.values().stream()); } private class SyntheticSourceFieldLoader implements SourceLoader.SyntheticFieldLoader { From 77ce60530ceb562920bd0ccaa424f1ec58ce781b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Tue, 14 May 2024 10:16:55 +0200 Subject: [PATCH 115/119] [docs] Document new role description field (#108422) This commit updates Role API docs to include new description field (introduced in #107088) and adds descriptions for all built-in roles. --- docs/build.gradle | 1 + .../rest-api/security/create-roles.asciidoc | 4 + .../rest-api/security/get-roles.asciidoc | 1 + .../KibanaOwnedReservedRoleDescriptors.java | 14 +- .../authz/store/ReservedRolesStore.java | 243 ++++++++++++++++-- .../security/authc/ApiKeyIntegTests.java | 4 +- .../xpack/security/authc/ApiKeyService.java | 8 +- .../security/authc/ApiKeyServiceTests.java | 25 +- 8 files changed, 250 insertions(+), 50 deletions(-) diff --git a/docs/build.gradle b/docs/build.gradle index 0eba980e8cc3..7ca4820eea1a 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -1752,6 +1752,7 @@ setups['setup-snapshots'] = setups['setup-repository'] + ''' name: "my_admin_role" body: > { + "description": "Grants full access to all management features within the cluster.", "cluster": ["all"], "indices": [ {"names": ["index1", "index2" ], "privileges": ["all"], "field_security" : {"grant" : [ "title", "body" ]}} diff --git a/docs/reference/rest-api/security/create-roles.asciidoc b/docs/reference/rest-api/security/create-roles.asciidoc index 4f41c0b54bb1..75f1d7c79918 100644 --- 
a/docs/reference/rest-api/security/create-roles.asciidoc +++ b/docs/reference/rest-api/security/create-roles.asciidoc @@ -50,6 +50,9 @@ privilege or action. `cluster`:: (list) A list of cluster privileges. These privileges define the cluster level actions that users with this role are able to execute. +`description`:: (string) A description of the role. +The maximum length is `1000` chars. + `global`:: (object) An object defining global privileges. A global privilege is a form of cluster privilege that is request-aware. Support for global privileges is currently limited to the management of application privileges. @@ -104,6 +107,7 @@ The following example adds a role called `my_admin_role`: -------------------------------------------------- POST /_security/role/my_admin_role { + "description": "Grants full access to all management features within the cluster.", "cluster": ["all"], "indices": [ { diff --git a/docs/reference/rest-api/security/get-roles.asciidoc b/docs/reference/rest-api/security/get-roles.asciidoc index 80f0fd587aae..3eb5a735194c 100644 --- a/docs/reference/rest-api/security/get-roles.asciidoc +++ b/docs/reference/rest-api/security/get-roles.asciidoc @@ -61,6 +61,7 @@ GET /_security/role/my_admin_role -------------------------------------------------- { "my_admin_role": { + "description": "Grants full access to all management features within the cluster.", "cluster" : [ "all" ], "indices" : [ { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 49be4c5d466b..eb4b7efdb88b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ 
-49,7 +49,11 @@ static RoleDescriptor kibanaAdminUser(String name, Map metadata) null, null, metadata, - null + null, + null, + null, + null, + "Grants access to all features in Kibana." ); } @@ -408,7 +412,13 @@ static RoleDescriptor kibanaSystem(String name) { getRemoteIndicesReadPrivileges("traces-apm-*") }, null, null, - null + "Grants access necessary for the Kibana system user to read from and write to the Kibana indices, " + + "manage index templates and tokens, and check the availability of the Elasticsearch cluster. " + + "It also permits activating, searching, and retrieving user profiles, " + + "as well as updating user profile data for the kibana-* namespace. " + + "Additionally, this role grants read access to the .monitoring-* indices " + + "and read and write access to the .reporting-* indices. " + + "Note: This role should not be assigned to users as the granted permissions may change between releases." ); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index dd8f34a60fa1..2e7a5271103f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -103,7 +103,11 @@ public class ReservedRolesStore implements BiConsumer, ActionListene ) ), null, - null + "Grants full access to cluster management and data indices. " + + "This role also grants direct read-only access to restricted indices like .security. " + + "A user with this role can impersonate any other user in the system, " + + "manage security and create roles with unlimited privileges. " + + "Take extra care when assigning it to a user." 
); private static final Map ALL_RESERVED_ROLES = initializeReservedRoles(); @@ -203,7 +207,12 @@ private static Map initializeReservedRoles() { getRemoteIndicesReadPrivileges("metricbeat-*") }, null, null, - null + "Grants the minimum privileges required for any user of X-Pack monitoring other than those required to use Kibana. " + + "This role grants access to the monitoring indices and grants privileges necessary " + + "for reading basic cluster information. " + + "This role also includes all Kibana privileges for the Elastic Stack monitoring features. " + + "Monitoring users should also be assigned the kibana_admin role, " + + "or another role with access to the Kibana instance." ) ), entry( @@ -232,7 +241,16 @@ private static Map initializeReservedRoles() { ) .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants the minimum privileges required to write data into the monitoring indices (.monitoring-*). " + + "This role also has the privileges necessary to create Metricbeat indices (metricbeat-*) " + + "and write data into them." ) ), entry( @@ -251,7 +269,11 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants the minimum privileges required to collect monitoring data for the Elastic Stack." ) ), entry( @@ -261,7 +283,14 @@ private static Map initializeReservedRoles() { new String[] { "manage_index_templates", "manage_pipeline" }, null, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access to manage all index templates and all ingest pipeline configurations." 
) ), // reporting_user doesn't have any privileges in Elasticsearch, and Kibana authorizes privileges based on this role @@ -275,7 +304,14 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.getDeprecatedReservedMetadata("Please use Kibana feature privileges instead"), - null + null, + null, + null, + null, + "Grants the specific privileges required for users of X-Pack reporting other than those required to use Kibana. " + + "This role grants access to the reporting indices; each user has access to only their own reports. " + + "Reporting users should also be assigned additional roles that grant access to Kibana as well as read access " + + "to the indices that will be used to generate reports." ) ), entry(KibanaSystemUser.ROLE_NAME, kibanaSystemRoleDescriptor(KibanaSystemUser.ROLE_NAME)), @@ -286,7 +322,15 @@ private static Map initializeReservedRoles() { new String[] { "monitor", MonitoringBulkAction.NAME }, null, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access necessary for the Logstash system user to send system-level data (such as monitoring) to Elasticsearch. " + + "This role should not be assigned to users as the granted permissions may change between releases." ) ), entry( @@ -297,7 +341,14 @@ private static Map initializeReservedRoles() { new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices(".management-beats").privileges("all").build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access to the .management-beats index, which contains configuration information for the Beats." 
) ), entry( @@ -311,7 +362,15 @@ private static Map initializeReservedRoles() { .privileges("create_index", "create") .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access necessary for the Beats system user to send system-level data (such as monitoring) to Elasticsearch. " + + "This role should not be assigned to users as the granted permissions may change between releases." ) ), entry( @@ -325,7 +384,14 @@ private static Map initializeReservedRoles() { .privileges("create_index", "create_doc") .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access necessary for the APM system user to send system-level data (such as monitoring) to Elasticsearch.\n" ) ), entry( @@ -381,7 +447,12 @@ private static Map initializeReservedRoles() { MetadataUtils.getDeprecatedReservedMetadata( "This role will be removed in a future major release. Please use editor and viewer roles instead" ), - null + null, + null, + null, + null, + "Grants the privileges required for APM users (such as read and view_index_metadata privileges " + + "on the apm-* and .ml-anomalies* indices)." ) ), entry( @@ -394,7 +465,11 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants access necessary to manage inference models and performing inference." ) ), entry( @@ -407,7 +482,11 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants access necessary to perform inference." 
) ), entry( @@ -440,7 +519,15 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants the minimum privileges required to view machine learning configuration, status, and work with results. " + + "This role grants monitor_ml cluster privileges, read access to the .ml-notifications and .ml-anomalies* indices " + + "(which store machine learning results), and write access to .ml-annotations* indices. " + + "Machine learning users also need index privileges for source and destination indices " + + "and roles that grant access to Kibana. " ) ), entry( @@ -474,7 +561,15 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Provides all of the privileges of the machine_learning_user role plus the full use of the machine learning APIs. " + + "Grants manage_ml cluster privileges, read access to .ml-anomalies*, .ml-notifications*, .ml-state*, " + + ".ml-meta* indices and write access to .ml-annotations* indices. " + + "Machine learning administrators also need index privileges for source and destination indices " + + "and roles that grant access to Kibana." ) ), // DEPRECATED: to be removed in 9.0.0 @@ -501,7 +596,12 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.getDeprecatedReservedMetadata("Please use the [transform_admin] role instead"), - null + null, + null, + null, + null, + "Grants manage_data_frame_transforms cluster privileges, which enable you to manage transforms. " + + "This role also includes all Kibana privileges for the machine learning features." 
) ), // DEPRECATED: to be removed in 9.0.0 @@ -528,7 +628,12 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.getDeprecatedReservedMetadata("Please use the [transform_user] role instead"), - null + null, + null, + null, + null, + "Grants monitor_data_frame_transforms cluster privileges, which enable you to use transforms. " + + "This role also includes all Kibana privileges for the machine learning features. " ) ), entry( @@ -549,7 +654,12 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants manage_transform cluster privileges, which enable you to manage transforms. " + + "This role also includes all Kibana privileges for the machine learning features." ) ), entry( @@ -570,7 +680,12 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants monitor_transform cluster privileges, which enable you to perform read-only operations related to " + + "transforms. This role also includes all Kibana privileges for the machine learning features." ) ), entry( @@ -585,7 +700,16 @@ private static Map initializeReservedRoles() { .allowRestrictedIndices(true) .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Allows users to create and execute all Watcher actions. " + + "Grants read access to the .watches index. Also grants read access " + + "to the watch history and the triggered watches index." ) ), entry( @@ -604,7 +728,14 @@ private static Map initializeReservedRoles() { .privileges("read") .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants read access to the .watches index, the get watch action and the watcher stats." 
) ), entry( @@ -619,16 +750,50 @@ private static Map initializeReservedRoles() { .allowRestrictedIndices(true) .build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access to the .logstash* indices for managing configurations, " + + "and grants necessary access for logstash-specific APIs exposed by the logstash x-pack plugin." ) ), entry( "rollup_user", - new RoleDescriptor("rollup_user", new String[] { "monitor_rollup" }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA) + new RoleDescriptor( + "rollup_user", + new String[] { "monitor_rollup" }, + null, + null, + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants monitor_rollup cluster privileges, which enable you to perform read-only operations related to rollups." + ) ), entry( "rollup_admin", - new RoleDescriptor("rollup_admin", new String[] { "manage_rollup" }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA) + new RoleDescriptor( + "rollup_admin", + new String[] { "manage_rollup" }, + null, + null, + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants manage_rollup cluster privileges, which enable you to manage and execute all rollup actions." + ) ), entry( "snapshot_user", @@ -645,7 +810,14 @@ private static Map initializeReservedRoles() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants the necessary privileges to create snapshots of all the indices and to view their metadata. " + + "This role enables users to view the configuration of existing snapshot repositories and snapshot details. " + + "It does not grant authority to remove or add repositories or to restore snapshots. " + + "It also does not enable to change index settings or to read or update data stream or index data." 
) ), entry( @@ -661,7 +833,14 @@ private static Map initializeReservedRoles() { .build(), RoleDescriptor.IndicesPrivileges.builder().indices(".enrich-*").privileges("manage", "write").build() }, null, - MetadataUtils.DEFAULT_RESERVED_METADATA + null, + null, + MetadataUtils.DEFAULT_RESERVED_METADATA, + null, + null, + null, + null, + "Grants access to manage all enrich indices (.enrich-*) and all operations on ingest pipelines." ) ), entry("viewer", buildViewerRoleDescriptor()), @@ -703,7 +882,11 @@ private static RoleDescriptor buildViewerRoleDescriptor() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants read-only access to all features in Kibana (including Solutions) and to data indices." ); } @@ -750,7 +933,11 @@ private static RoleDescriptor buildEditorRoleDescriptor() { null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, - null + null, + null, + null, + null, + "Grants full access to all features in Kibana (including Solutions) and read-only access to data indices." 
); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 58d6657b99e3..076ac01f1c8f 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -2673,7 +2673,9 @@ public void testUpdateApiKeysAutoUpdatesLegacySuperuserRoleDescriptor() throws E // raw document has the legacy superuser role descriptor expectRoleDescriptorsForApiKey("limited_by_role_descriptors", legacySuperuserRoleDescriptor, getApiKeyDocument(apiKeyId)); - final Set currentSuperuserRoleDescriptors = Set.of(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); + final Set currentSuperuserRoleDescriptors = ApiKeyService.removeUserRoleDescriptorDescriptions( + Set.of(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR) + ); // The first request is not a noop because we are auto-updating the legacy role descriptors to 8.x role descriptors assertSingleUpdate( apiKeyId, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index 55a89e184f84..883d7cb8ab10 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -371,7 +371,13 @@ && hasRemoteIndices(request.getRoleDescriptors())) { } } - private Set removeUserRoleDescriptorDescriptions(Set userRoleDescriptors) { + /** + * This method removes description from the given user's (limited-by) role descriptors. 
+ * The description field is not supported for API key role descriptors hence storing limited-by roles with descriptions + * would be inconsistent and require handling backwards compatibility. + * Hence why we have to remove them before create/update of API key roles. + */ + static Set removeUserRoleDescriptorDescriptions(Set userRoleDescriptors) { return userRoleDescriptors.stream().map(roleDescriptor -> { if (roleDescriptor.hasDescription()) { return new RoleDescriptor( diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 7752b85c6345..0871e2568d22 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -1158,7 +1158,9 @@ private static Tuple, Map> newApiKeyDocument getFastStoredHashAlgoForTests().hash(new SecureString(key.toCharArray())), "test", authentication, - type == ApiKey.Type.CROSS_CLUSTER ? Set.of() : Collections.singleton(SUPERUSER_ROLE_DESCRIPTOR), + type == ApiKey.Type.CROSS_CLUSTER + ? 
Set.of() + : ApiKeyService.removeUserRoleDescriptorDescriptions(Set.of(SUPERUSER_ROLE_DESCRIPTOR)), Instant.now(), Instant.now().plus(expiry), keyRoles, @@ -1316,22 +1318,6 @@ public void testParseRoleDescriptorsMap() throws Exception { assertThat(roleDescriptors, hasSize(1)); assertThat(roleDescriptors.get(0), equalTo(roleARoleDescriptor)); - Map superUserRdMap; - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - superUserRdMap = XContentHelper.convertToMap( - XContentType.JSON.xContent(), - BytesReference.bytes(SUPERUSER_ROLE_DESCRIPTOR.toXContent(builder, ToXContent.EMPTY_PARAMS, true)).streamInput(), - false - ); - } - roleDescriptors = service.parseRoleDescriptors( - apiKeyId, - Map.of(SUPERUSER_ROLE_DESCRIPTOR.getName(), superUserRdMap), - randomApiKeyRoleType() - ); - assertThat(roleDescriptors, hasSize(1)); - assertThat(roleDescriptors.get(0), equalTo(SUPERUSER_ROLE_DESCRIPTOR)); - final Map legacySuperUserRdMap; try (XContentBuilder builder = JsonXContent.contentBuilder()) { legacySuperUserRdMap = XContentHelper.convertToMap( @@ -1812,7 +1798,10 @@ public void testApiKeyDocCache() throws IOException, ExecutionException, Interru RoleReference.ApiKeyRoleType.LIMITED_BY ); assertEquals(1, limitedByRoleDescriptors.size()); - assertEquals(SUPERUSER_ROLE_DESCRIPTOR, limitedByRoleDescriptors.get(0)); + RoleDescriptor superuserWithoutDescription = ApiKeyService.removeUserRoleDescriptorDescriptions(Set.of(SUPERUSER_ROLE_DESCRIPTOR)) + .iterator() + .next(); + assertEquals(superuserWithoutDescription, limitedByRoleDescriptors.get(0)); if (metadata == null) { assertNull(cachedApiKeyDoc.metadataFlattened); } else { From 920290a37bee2b56776baf42db3773b8fc76ffd7 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Tue, 14 May 2024 11:14:13 +0200 Subject: [PATCH 116/119] Extract `DataStreamIndices` from `DataStream` class (#107562) We were seeing more and more common fields between "regular" backing indices and 
failure store indices (i.e. `indices`, `rolloverOnWrite`, `autoShardingEvent`). To avoid having to duplicate these fields (and possibly any future fields), we extract a class that contains these fields. --- .../change-mappings-and-settings.asciidoc | 2 +- .../data-streams/downsampling-manual.asciidoc | 2 +- ...grate-data-stream-from-ilm-to-dsl.asciidoc | 8 +- .../indices/get-data-stream.asciidoc | 2 +- .../datastreams/DataStreamIT.java | 9 +- .../datastreams/FailureStoreQueryParamIT.java | 14 +- .../datastreams/DataStreamFeatures.java | 2 +- .../DeleteDataStreamTransportAction.java | 2 +- .../action/GetDataStreamsTransportAction.java | 4 +- .../UpdateTimeSeriesRangeServiceTests.java | 4 +- .../action/GetDataStreamsResponseTests.java | 14 +- ...DataStreamGlobalRetentionServiceTests.java | 2 +- .../test/data_stream/10_basic.yml | 30 +- .../data_stream/170_modify_data_stream.yml | 24 +- .../190_failure_store_redirection.yml | 16 +- .../200_rollover_failure_store.yml | 20 +- .../30_auto_create_data_stream.yml | 10 +- .../org/elasticsearch/TransportVersions.java | 1 + .../indices/create/AutoCreateAction.java | 4 +- .../rollover/MetadataRolloverService.java | 34 +- .../rollover/TransportRolloverAction.java | 4 +- .../datastreams/GetDataStreamAction.java | 103 ++-- .../action/index/IndexRequest.java | 4 +- .../cluster/metadata/DataStream.java | 551 +++++++++++------- .../cluster/metadata/DataStreamMetadata.java | 4 +- .../metadata/IndexNameExpressionResolver.java | 6 +- .../cluster/metadata/Metadata.java | 7 +- .../MetadataCreateDataStreamService.java | 4 +- .../metadata/MetadataDataStreamsService.java | 34 +- .../snapshots/RestoreService.java | 5 +- .../MetadataRolloverServiceTests.java | 15 +- .../TransportRolloverActionTests.java | 7 +- .../DataStreamAutoShardingServiceTests.java | 8 +- .../cluster/metadata/DataStreamTests.java | 142 +++-- .../MetadataDataStreamsServiceTests.java | 7 +- .../metadata/DataStreamTestHelper.java | 45 +- 
.../ReactiveStorageDeciderService.java | 5 +- .../ccr/action/TransportPutFollowAction.java | 11 +- .../integration/DataStreamSecurityIT.java | 7 +- 39 files changed, 699 insertions(+), 474 deletions(-) diff --git a/docs/reference/data-streams/change-mappings-and-settings.asciidoc b/docs/reference/data-streams/change-mappings-and-settings.asciidoc index 47c3529ceef4..c96f0c7342a9 100644 --- a/docs/reference/data-streams/change-mappings-and-settings.asciidoc +++ b/docs/reference/data-streams/change-mappings-and-settings.asciidoc @@ -602,7 +602,7 @@ stream's oldest backing index. // TESTRESPONSE[s/"index_uuid": "_eEfRrFHS9OyhqWntkgHAQ"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] // TESTRESPONSE[s/"index_name": ".ds-my-data-stream-2099.03.07-000001"/"index_name": $body.data_streams.0.indices.0.index_name/] // TESTRESPONSE[s/"index_name": ".ds-my-data-stream-2099.03.08-000002"/"index_name": $body.data_streams.0.indices.1.index_name/] -// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_indices":[],"failure_store":false/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_store":{"enabled": false, "indices": [], "rollover_on_write": false}/] <1> First item in the `indices` array for `my-data-stream`. 
This item contains information about the stream's oldest backing index, diff --git a/docs/reference/data-streams/downsampling-manual.asciidoc b/docs/reference/data-streams/downsampling-manual.asciidoc index 5e0c09f9d2be..8f6b39d2aa0d 100644 --- a/docs/reference/data-streams/downsampling-manual.asciidoc +++ b/docs/reference/data-streams/downsampling-manual.asciidoc @@ -389,7 +389,7 @@ This returns: // TESTRESPONSE[s/"ltOJGmqgTVm4T-Buoe7Acg"/$body.data_streams.0.indices.0.index_uuid/] // TESTRESPONSE[s/"2023-07-26T09:26:42.000Z"/$body.data_streams.0.time_series.temporal_ranges.0.start/] // TESTRESPONSE[s/"2023-07-26T13:26:42.000Z"/$body.data_streams.0.time_series.temporal_ranges.0.end/] -// TESTRESPONSE[s/"replicated": false/"replicated": false,"failure_indices":[],"failure_store":false/] +// TESTRESPONSE[s/"replicated": false/"replicated": false,"failure_store":{"enabled": false, "indices": [], "rollover_on_write": false}/] <1> The backing index for this data stream. Before a backing index can be downsampled, the TSDS needs to be rolled over and diff --git a/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc index 6bfa9ad9b00c..b89f55dd4157 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc @@ -147,7 +147,7 @@ and that the next generation index will also be managed by {ilm-init}: // TESTRESPONSE[s/"index_uuid": "xCEhwsp8Tey0-FLNFYVwSg"/"index_uuid": $body.data_streams.0.indices.0.index_uuid/] // TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000002"/"index_name": $body.data_streams.0.indices.1.index_name/] // TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8DJ5gw"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] -// TESTRESPONSE[s/"status": "GREEN"/"status": 
"YELLOW","failure_indices":[],"failure_store":false/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_store":{"enabled": false, "indices": [], "rollover_on_write": false}/] <1> The name of the backing index. <2> For each backing index we display the value of the <> @@ -284,7 +284,7 @@ GET _data_stream/dsl-data-stream // TESTRESPONSE[s/"index_uuid": "xCEhwsp8Tey0-FLNFYVwSg"/"index_uuid": $body.data_streams.0.indices.0.index_uuid/] // TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000002"/"index_name": $body.data_streams.0.indices.1.index_name/] // TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8DJ5gw"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] -// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_indices":[],"failure_store":false/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_store":{"enabled": false, "indices": [], "rollover_on_write": false}/] <1> The existing backing index will continue to be managed by {ilm-init} <2> The existing backing index will continue to be managed by {ilm-init} @@ -364,7 +364,7 @@ GET _data_stream/dsl-data-stream // TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8DJ5gw"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] // TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000003"/"index_name": $body.data_streams.0.indices.2.index_name/] // TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8abcd1"/"index_uuid": $body.data_streams.0.indices.2.index_uuid/] -// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_indices":[],"failure_store":false/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_store":{"enabled": false, "indices": [], "rollover_on_write": false}/] <1> The backing indices that existed before rollover will continue to be managed by {ilm-init} <2> The backing indices that existed before rollover will continue to be managed by {ilm-init} @@ -462,7 +462,7 @@ GET _data_stream/dsl-data-stream // 
TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8DJ5gw"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] // TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000003"/"index_name": $body.data_streams.0.indices.2.index_name/] // TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8abcd1"/"index_uuid": $body.data_streams.0.indices.2.index_uuid/] -// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_indices":[],"failure_store":false/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW","failure_store":{"enabled": false, "indices": [], "rollover_on_write": false}/] <1> The write index is now managed by {ilm-init} <2> The `lifecycle` configured on the data stream is now disabled. <3> The next write index will be managed by {ilm-init} diff --git a/docs/reference/indices/get-data-stream.asciidoc b/docs/reference/indices/get-data-stream.asciidoc index 240a33164b37..0a318cd13591 100644 --- a/docs/reference/indices/get-data-stream.asciidoc +++ b/docs/reference/indices/get-data-stream.asciidoc @@ -358,4 +358,4 @@ The API returns the following response: // TESTRESPONSE[s/"index_name": ".ds-my-data-stream-two-2099.03.08-000001"/"index_name": $body.data_streams.1.indices.0.index_name/] // TESTRESPONSE[s/"index_uuid": "3liBu2SYS5axasRt6fUIpA"/"index_uuid": $body.data_streams.1.indices.0.index_uuid/] // TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW"/] -// TESTRESPONSE[s/"replicated": false/"replicated": false,"failure_indices":[],"failure_store":false/] +// TESTRESPONSE[s/"replicated": false/"replicated": false,"failure_store":{"enabled": false, "indices": [], "rollover_on_write": false}/] diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index cf4eaab76301..2b1a8e1c0e31 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ 
b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -1779,7 +1779,14 @@ public void testRemoveGhostReference() throws Exception { public ClusterState execute(ClusterState currentState) throws Exception { DataStream original = currentState.getMetadata().dataStreams().get(dataStreamName); DataStream broken = original.copy() - .setIndices(List.of(new Index(original.getIndices().get(0).getName(), "broken"), original.getIndices().get(1))) + .setBackingIndices( + original.getBackingIndices() + .copy() + .setIndices( + List.of(new Index(original.getIndices().get(0).getName(), "broken"), original.getIndices().get(1)) + ) + .build() + ) .build(); brokenDataStreamHolder.set(broken); return ClusterState.builder(currentState) diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java index 1d8de6b9ac5f..27cd5697fd0f 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/FailureStoreQueryParamIT.java @@ -58,7 +58,7 @@ public void setup() throws IOException { assertThat(dataStreams.size(), is(1)); Map dataStream = (Map) dataStreams.get(0); assertThat(dataStream.get("name"), equalTo(DATA_STREAM_NAME)); - List backingIndices = getBackingIndices(dataStream); + List backingIndices = getIndices(dataStream); assertThat(backingIndices.size(), is(1)); List failureStore = getFailureStore(dataStream); assertThat(failureStore.size(), is(1)); @@ -199,18 +199,16 @@ public void testPutIndexMappingApi() throws IOException { } } - private List getBackingIndices(Map response) { - return getIndices(response, "indices"); - } - + @SuppressWarnings("unchecked") private List getFailureStore(Map response) { - return getIndices(response, 
"failure_indices"); + var failureStore = (Map) response.get("failure_store"); + return getIndices(failureStore); } @SuppressWarnings("unchecked") - private List getIndices(Map response, String fieldName) { - List> indices = (List>) response.get(fieldName); + private List getIndices(Map response) { + List> indices = (List>) response.get("indices"); return indices.stream().map(index -> index.get("index_name")).toList(); } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java index 721630d29b4c..464a11ce8a06 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java @@ -37,7 +37,7 @@ public Set getFeatures() { DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE, // Added in 8.12 LazyRolloverAction.DATA_STREAM_LAZY_ROLLOVER, // Added in 8.13 DataStreamAutoShardingService.DATA_STREAM_AUTO_SHARDING_FEATURE, - DataStreamGlobalRetention.GLOBAL_RETENTION // Added in 8.14 + DataStreamGlobalRetention.GLOBAL_RETENTION // Added in 8.14 ); } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java index c3e8331b856f..a614a2dc40e2 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java @@ -155,7 +155,7 @@ static ClusterState removeDataStream( DataStream dataStream = currentState.metadata().dataStreams().get(dataStreamName); assert dataStream != null; backingIndicesToRemove.addAll(dataStream.getIndices()); - 
backingIndicesToRemove.addAll(dataStream.getFailureIndices()); + backingIndicesToRemove.addAll(dataStream.getFailureIndices().getIndices()); } // first delete the data streams and then the indices: diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java index f7064eb39a01..8017b1c72f86 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/GetDataStreamsTransportAction.java @@ -145,8 +145,8 @@ static GetDataStreamAction.Response innerOperation( Map backingIndicesSettingsValues = new HashMap<>(); Metadata metadata = state.getMetadata(); collectIndexSettingsValues(dataStream, backingIndicesSettingsValues, metadata, dataStream.getIndices()); - if (DataStream.isFailureStoreFeatureFlagEnabled() && dataStream.getFailureIndices().isEmpty() == false) { - collectIndexSettingsValues(dataStream, backingIndicesSettingsValues, metadata, dataStream.getFailureIndices()); + if (DataStream.isFailureStoreFeatureFlagEnabled() && dataStream.getFailureIndices().getIndices().isEmpty() == false) { + collectIndexSettingsValues(dataStream, backingIndicesSettingsValues, metadata, dataStream.getFailureIndices().getIndices()); } GetDataStreamAction.Response.TimeSeries timeSeries = null; diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java index 66133e9fbe0f..4b0eaa6c46ba 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java @@ -139,7 +139,9 @@ 
public void testUpdateTimeSeriesTemporalRange_NoUpdateBecauseReplicated() { List.of(new Tuple<>(start.minus(4, ChronoUnit.HOURS), start), new Tuple<>(start, end)) ).getMetadata(); DataStream d = metadata.dataStreams().get(dataStreamName); - metadata = Metadata.builder(metadata).put(d.copy().setReplicated(true).setRolloverOnWrite(false).build()).build(); + metadata = Metadata.builder(metadata) + .put(d.copy().setReplicated(true).setBackingIndices(d.getBackingIndices().copy().setRolloverOnWrite(false).build()).build()) + .build(); now = now.plus(1, ChronoUnit.HOURS); ClusterState in = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(metadata).build(); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java index ec6e624794a0..4059127b5eb8 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/action/GetDataStreamsResponseTests.java @@ -82,7 +82,7 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti .setIndexMode(IndexMode.STANDARD) .setLifecycle(new DataStreamLifecycle()) .setFailureStoreEnabled(true) - .setFailureIndices(failureStores) + .setFailureIndices(DataStream.DataStreamIndices.failureIndicesBuilder(failureStores).build()) .build(); String ilmPolicyName = "rollover-30days"; @@ -159,9 +159,8 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti ); if (DataStream.isFailureStoreFeatureFlagEnabled()) { - List failureStoresRepresentation = (List) dataStreamMap.get( - DataStream.FAILURE_INDICES_FIELD.getPreferredName() - ); + var failureStore = (Map) dataStreamMap.get(DataStream.FAILURE_STORE_FIELD.getPreferredName()); + List failureStoresRepresentation = (List) 
failureStore.get(DataStream.INDICES_FIELD.getPreferredName()); Map failureStoreRepresentation = (Map) failureStoresRepresentation.get(0); assertThat(failureStoreRepresentation.get("index_name"), is(failureStoreIndex.getName())); assertThat(failureStoreRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); @@ -185,7 +184,7 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti .setIndexMode(IndexMode.STANDARD) .setLifecycle(new DataStreamLifecycle(null, null, false)) .setFailureStoreEnabled(true) - .setFailureIndices(failureStores) + .setFailureIndices(DataStream.DataStreamIndices.failureIndicesBuilder(failureStores).build()) .build(); String ilmPolicyName = "rollover-30days"; @@ -251,9 +250,8 @@ public void testResponseIlmAndDataStreamLifecycleRepresentation() throws Excepti ); if (DataStream.isFailureStoreFeatureFlagEnabled()) { - List failureStoresRepresentation = (List) dataStreamMap.get( - DataStream.FAILURE_INDICES_FIELD.getPreferredName() - ); + var failureStore = (Map) dataStreamMap.get(DataStream.FAILURE_STORE_FIELD.getPreferredName()); + List failureStoresRepresentation = (List) failureStore.get(DataStream.INDICES_FIELD.getPreferredName()); Map failureStoreRepresentation = (Map) failureStoresRepresentation.get(0); assertThat(failureStoreRepresentation.get("index_name"), is(failureStoreIndex.getName())); assertThat(failureStoreRepresentation.get(Response.DataStreamInfo.PREFER_ILM.getPreferredName()), is(false)); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/UpdateDataStreamGlobalRetentionServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/UpdateDataStreamGlobalRetentionServiceTests.java index b9dc6d349873..41d00d063955 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/UpdateDataStreamGlobalRetentionServiceTests.java +++ 
b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/UpdateDataStreamGlobalRetentionServiceTests.java @@ -261,7 +261,7 @@ private static DataStream newDataStreamInstance( .setReplicated(replicated) .setLifecycle(lifecycle) .setFailureStoreEnabled(failureStores.isEmpty() == false) - .setFailureIndices(failureStores); + .setFailureIndices(DataStream.DataStreamIndices.failureIndicesBuilder(failureStores).build()); if (randomBoolean()) { builder.setSystem(true); builder.setHidden(true); diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml index a7ec53782382..20485402b07a 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/10_basic.yml @@ -210,8 +210,8 @@ setup: --- "Create data stream with failure store": - requires: - cluster_features: ["gte_v8.11.0"] - reason: "data stream failure stores only creatable in 8.11+" + cluster_features: ["gte_v8.15.0"] + reason: "data stream failure stores REST structure changed in 8.15+" - do: allowed_warnings: @@ -248,9 +248,9 @@ setup: - match: { data_streams.0.status: 'GREEN' } - match: { data_streams.0.template: 'my-template4' } - match: { data_streams.0.hidden: false } - - match: { data_streams.0.failure_store: true } - - length: { data_streams.0.failure_indices: 1 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-failure-data-stream1-(\d{4}\.\d{2}\.\d{2}-)?000001/'} + - match: { data_streams.0.failure_store.enabled: true } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-failure-data-stream1-(\d{4}\.\d{2}\.\d{2}-)?000001/'} - match: { data_streams.1.name: failure-data-stream2 } - match: { data_streams.1.timestamp_field.name: 
'@timestamp' } @@ -259,15 +259,15 @@ setup: - match: { data_streams.1.indices.0.index_name: '/\.ds-failure-data-stream2-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - match: { data_streams.1.template: 'my-template4' } - match: { data_streams.1.hidden: false } - - match: { data_streams.1.failure_store: true } - - length: { data_streams.1.failure_indices: 1 } - - match: { data_streams.1.failure_indices.0.index_name: '/\.fs-failure-data-stream2-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.1.failure_store.enabled: true } + - length: { data_streams.1.failure_store.indices: 1 } + - match: { data_streams.1.failure_store.indices.0.index_name: '/\.fs-failure-data-stream2-(\d{4}\.\d{2}\.\d{2}-)?000001/' } # save the backing index names for later use - set: { data_streams.0.indices.0.index_name: idx0name } - - set: { data_streams.0.failure_indices.0.index_name: fsidx0name } + - set: { data_streams.0.failure_store.indices.0.index_name: fsidx0name } - set: { data_streams.1.indices.0.index_name: idx1name } - - set: { data_streams.1.failure_indices.0.index_name: fsidx1name } + - set: { data_streams.1.failure_store.indices.0.index_name: fsidx1name } - do: indices.get_mapping: @@ -538,8 +538,8 @@ setup: --- "Delete data stream with failure stores": - requires: - cluster_features: ["gte_v8.12.0"] - reason: "data stream failure stores only supported in 8.12+" + cluster_features: ["gte_v8.15.0"] + reason: "data stream failure stores REST structure changed in 8.15+" - do: allowed_warnings: @@ -570,7 +570,7 @@ setup: name: failure-data-stream1 - set: { data_streams.0.indices.0.index_name: idx0name } - - set: { data_streams.0.failure_indices.0.index_name: fs0name } + - set: { data_streams.0.failure_store.indices.0.index_name: fs0name } - do: indices.get: @@ -586,8 +586,8 @@ setup: - match: { data_streams.0.generation: 1 } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-failure-data-stream1-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - length: { 
data_streams.0.failure_indices: 1 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-failure-data-stream1-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-failure-data-stream1-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - do: indices.delete_data_stream: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml index 8c0e27373664..a3baa524259b 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/170_modify_data_stream.yml @@ -92,8 +92,8 @@ --- "Modify a data stream's failure store": - requires: - cluster_features: [ "gte_v8.14.0" ] - reason: "this API was released in 8.14.0" + cluster_features: ["gte_v8.15.0"] + reason: "data stream failure stores REST structure changed in 8.15+" test_runner_features: [ "allowed_warnings" ] - do: @@ -128,14 +128,14 @@ indices.get_data_stream: name: data-stream-for-modification - set: { data_streams.0.indices.0.index_name: write_index } - - set: { data_streams.0.failure_indices.0.index_name: first_failure_index } - - set: { data_streams.0.failure_indices.1.index_name: write_failure_index } + - set: { data_streams.0.failure_store.indices.0.index_name: first_failure_index } + - set: { data_streams.0.failure_store.indices.1.index_name: write_failure_index } - do: indices.get_data_stream: name: data-stream-for-modification2 - set: { data_streams.0.indices.0.index_name: second_write_index } - - set: { data_streams.0.failure_indices.0.index_name: second_write_failure_index } + - set: { data_streams.0.failure_store.indices.0.index_name: second_write_failure_index } - do: index: @@ -170,11 +170,11 @@ - match: { 
data_streams.0.timestamp_field.name: '@timestamp' } - match: { data_streams.0.generation: 3 } - length: { data_streams.0.indices: 1 } - - length: { data_streams.0.failure_indices: 3 } + - length: { data_streams.0.failure_store.indices: 3 } - match: { data_streams.0.indices.0.index_name: $write_index } - - match: { data_streams.0.failure_indices.0.index_name: 'test_index1' } - - match: { data_streams.0.failure_indices.1.index_name: $first_failure_index } - - match: { data_streams.0.failure_indices.2.index_name: $write_failure_index } + - match: { data_streams.0.failure_store.indices.0.index_name: 'test_index1' } + - match: { data_streams.0.failure_store.indices.1.index_name: $first_failure_index } + - match: { data_streams.0.failure_store.indices.2.index_name: $write_failure_index } # An index that has an alias is not allowed to be added to failure store - do: @@ -269,10 +269,10 @@ - match: { data_streams.0.timestamp_field.name: '@timestamp' } - match: { data_streams.0.generation: 4 } - length: { data_streams.0.indices: 1 } - - length: { data_streams.0.failure_indices: 2 } + - length: { data_streams.0.failure_store.indices: 2 } - match: { data_streams.0.indices.0.index_name: $write_index } - - match: { data_streams.0.failure_indices.0.index_name: $first_failure_index } - - match: { data_streams.0.failure_indices.1.index_name: $write_failure_index } + - match: { data_streams.0.failure_store.indices.0.index_name: $first_failure_index } + - match: { data_streams.0.failure_store.indices.1.index_name: $write_failure_index } - do: indices.delete_data_stream: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml index 7268ee9bb3b5..9dce5150388d 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml +++ 
b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml @@ -23,8 +23,8 @@ teardown: --- "Redirect ingest failure in data stream to failure store": - requires: - cluster_features: ["gte_v8.13.0"] - reason: "data stream failure stores only redirect ingest failures in 8.13+" + cluster_features: ["gte_v8.15.0"] + reason: "data stream failure stores REST structure changed in 8.15+" test_runner_features: [allowed_warnings, contains] - do: @@ -74,9 +74,9 @@ teardown: - match: { data_streams.0.timestamp_field.name: '@timestamp' } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - match: { data_streams.0.failure_store: true } - - length: { data_streams.0.failure_indices: 1 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_store.enabled: true } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - do: search: @@ -152,9 +152,9 @@ teardown: - match: { data_streams.0.timestamp_field.name: '@timestamp' } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - match: { data_streams.0.failure_store: true } - - length: { data_streams.0.failure_indices: 1 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_store.enabled: true } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - do: search: diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml 
b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml index 0074ce425c6f..91d23afa67af 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/200_rollover_failure_store.yml @@ -1,8 +1,8 @@ --- setup: - requires: - cluster_features: ["gte_v8.14.0"] - reason: "data stream failure store rollover only supported in 8.14+" + cluster_features: ["gte_v8.15.0"] + reason: "data stream failure stores REST structure changed in 8.15+" test_runner_features: allowed_warnings - do: @@ -48,9 +48,9 @@ setup: - match: { data_streams.0.generation: 2 } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - length: { data_streams.0.failure_indices: 2 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - match: { data_streams.0.failure_indices.1.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + - length: { data_streams.0.failure_store.indices: 2 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_store.indices.1.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000002/' } --- "Roll over a data stream's failure store with conditions": @@ -86,9 +86,9 @@ setup: - match: { data_streams.0.generation: 2 } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - length: { data_streams.0.failure_indices: 2 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - match: { 
data_streams.0.failure_indices.1.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000002/' } + - length: { data_streams.0.failure_store.indices: 2 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_store.indices.1.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000002/' } --- "Don't roll over a data stream's failure store when conditions aren't met": @@ -112,5 +112,5 @@ setup: - match: { data_streams.0.generation: 1 } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - length: { data_streams.0.failure_indices: 1 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-data-stream-for-rollover-(\d{4}\.\d{2}\.\d{2}-)?000001/' } diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml index 32338fea056a..3ab22e6271c6 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/30_auto_create_data_stream.yml @@ -50,8 +50,8 @@ --- "Put index template with failure store": - requires: - cluster_features: ["gte_v8.11.0"] - reason: "data stream failure stores only creatable in 8.11+" + cluster_features: ["gte_v8.15.0"] + reason: "data stream failure stores REST structure changed in 8.15+" test_runner_features: allowed_warnings - do: @@ -91,9 +91,9 @@ - match: { data_streams.0.timestamp_field.name: 
'@timestamp' } - length: { data_streams.0.indices: 1 } - match: { data_streams.0.indices.0.index_name: '/\.ds-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - - match: { data_streams.0.failure_store: true } - - length: { data_streams.0.failure_indices: 1 } - - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { data_streams.0.failure_store.enabled: true } + - length: { data_streams.0.failure_store.indices: 1 } + - match: { data_streams.0.failure_store.indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } - do: indices.delete_data_stream: diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 1a6b7bb23804..f1232d2442c8 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -164,6 +164,7 @@ static TransportVersion def(int id) { public static final TransportVersion ML_INFERENCE_AZURE_OPENAI_COMPLETIONS = def(8_655_00_0); public static final TransportVersion JOIN_STATUS_AGE_SERIALIZATION = def(8_656_00_0); public static final TransportVersion ML_RERANK_DOC_OPTIONAL = def(8_657_00_0); + public static final TransportVersion FAILURE_STORE_FIELD_PARITY = def(8_658_00_0); /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java index 9cd7d713a3a4..094fccbc3518 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java @@ -266,9 +266,9 @@ ClusterState execute( final var dataStream = clusterState.metadata().dataStreams().get(request.index()); final var backingIndexName = dataStream.getIndices().get(0).getName(); - final var indexNames = dataStream.getFailureIndices().isEmpty() + final var indexNames = dataStream.getFailureIndices().getIndices().isEmpty() ? List.of(backingIndexName) - : List.of(backingIndexName, dataStream.getFailureIndices().get(0).getName()); + : List.of(backingIndexName, dataStream.getFailureIndices().getIndices().get(0).getName()); taskContext.success(getAckListener(indexNames, allocationActionMultiListener)); successfulRequests.put(request, indexNames); return clusterState; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java index 4284d860d85c..ed3721b35f3b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -196,12 +196,11 @@ public static NameResolution resolveRolloverNames( final IndexAbstraction indexAbstraction = currentState.metadata().getIndicesLookup().get(rolloverTarget); return switch (indexAbstraction.getType()) { case ALIAS -> resolveAliasRolloverNames(currentState.metadata(), indexAbstraction, newIndexName); - case 
DATA_STREAM -> { - if (isFailureStoreRollover) { - yield resolveDataStreamFailureStoreRolloverNames(currentState.metadata(), (DataStream) indexAbstraction); - } - yield resolveDataStreamRolloverNames(currentState.getMetadata(), (DataStream) indexAbstraction); - } + case DATA_STREAM -> resolveDataStreamRolloverNames( + currentState.metadata(), + (DataStream) indexAbstraction, + isFailureStoreRollover + ); default -> // the validate method above prevents this case throw new IllegalStateException("unable to roll over type [" + indexAbstraction.getType().getDisplayName() + "]"); @@ -220,19 +219,15 @@ private static NameResolution resolveAliasRolloverNames(Metadata metadata, Index return new NameResolution(sourceIndexName, unresolvedName, rolloverIndexName); } - private static NameResolution resolveDataStreamRolloverNames(Metadata metadata, DataStream dataStream) { - final IndexMetadata originalWriteIndex = metadata.index(dataStream.getWriteIndex()); - return new NameResolution(originalWriteIndex.getIndex().getName(), null, dataStream.nextWriteIndexAndGeneration(metadata).v1()); - } - - private static NameResolution resolveDataStreamFailureStoreRolloverNames(Metadata metadata, DataStream dataStream) { - assert dataStream.getFailureStoreWriteIndex() != null : "Unable to roll over failure store with no failure store indices"; + private static NameResolution resolveDataStreamRolloverNames(Metadata metadata, DataStream dataStream, boolean isFailureStoreRollover) { + final DataStream.DataStreamIndices dataStreamIndices = dataStream.getDataStreamIndices(isFailureStoreRollover); + assert dataStreamIndices.getWriteIndex() != null : "Unable to roll over dataStreamIndices with no indices"; - final IndexMetadata originalWriteIndex = metadata.index(dataStream.getFailureStoreWriteIndex()); + final IndexMetadata originalWriteIndex = metadata.index(dataStreamIndices.getWriteIndex()); return new NameResolution( originalWriteIndex.getIndex().getName(), null, - 
dataStream.nextFailureStoreWriteIndexAndGeneration(metadata).v1() + dataStream.nextWriteIndexAndGeneration(metadata, dataStreamIndices).v1() ); } @@ -327,10 +322,9 @@ private RolloverResult rolloverDataStream( templateV2 = systemDataStreamDescriptor.getComposableIndexTemplate(); } - final Index originalWriteIndex = isFailureStoreRollover ? dataStream.getFailureStoreWriteIndex() : dataStream.getWriteIndex(); - final Tuple nextIndexAndGeneration = isFailureStoreRollover - ? dataStream.nextFailureStoreWriteIndexAndGeneration(currentState.metadata()) - : dataStream.nextWriteIndexAndGeneration(currentState.metadata()); + final DataStream.DataStreamIndices dataStreamIndices = dataStream.getDataStreamIndices(isFailureStoreRollover); + final Index originalWriteIndex = dataStreamIndices.getWriteIndex(); + final Tuple nextIndexAndGeneration = dataStream.nextWriteIndexAndGeneration(metadata, dataStreamIndices); final String newWriteIndexName = nextIndexAndGeneration.v1(); final long newGeneration = nextIndexAndGeneration.v2(); MetadataCreateIndexService.validateIndexName(newWriteIndexName, currentState); // fails if the index already exists @@ -438,7 +432,7 @@ yield new DataStreamAutoShardingEvent( metadataBuilder = withShardSizeForecastForWriteIndex(dataStreamName, metadataBuilder); newState = ClusterState.builder(newState).metadata(metadataBuilder).build(); - newState = MetadataDataStreamsService.setRolloverOnWrite(newState, dataStreamName, false); + newState = MetadataDataStreamsService.setRolloverOnWrite(newState, dataStreamName, false, isFailureStoreRollover); return new RolloverResult(newWriteIndexName, originalWriteIndex.getName(), newState); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index b9ab28dc80e6..abf42cffdaa0 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -169,12 +169,13 @@ protected void masterOperation( assert task instanceof CancellableTask; Metadata metadata = clusterState.metadata(); // We evaluate the names of the index for which we should evaluate conditions, as well as what our newly created index *would* be. + boolean targetFailureStore = rolloverRequest.indicesOptions().failureStoreOptions().includeFailureIndices(); final MetadataRolloverService.NameResolution trialRolloverNames = MetadataRolloverService.resolveRolloverNames( clusterState, rolloverRequest.getRolloverTarget(), rolloverRequest.getNewIndexName(), rolloverRequest.getCreateIndexRequest(), - rolloverRequest.indicesOptions().failureStoreOptions().includeFailureIndices() + targetFailureStore ); final String trialSourceIndexName = trialRolloverNames.sourceName(); final String trialRolloverIndexName = trialRolloverNames.rolloverName(); @@ -200,6 +201,7 @@ protected void masterOperation( metadataDataStreamsService.setRolloverOnWrite( rolloverRequest.getRolloverTarget(), true, + targetFailureStore, rolloverRequest.ackTimeout(), rolloverRequest.masterNodeTimeout(), listener.map( diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index 812da87eab10..841a2df5eada 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -189,6 +189,7 @@ public static class DataStreamInfo implements SimpleDiffable, To public static final ParseField TIME_SINCE_LAST_AUTO_SHARD_EVENT_MILLIS = new ParseField( "time_since_last_auto_shard_event_millis" ); + public static final ParseField FAILURE_STORE_ENABLED = new 
ParseField("enabled"); private final DataStream dataStream; private final ClusterHealthStatus dataStreamStatus; @@ -222,7 +223,7 @@ public DataStreamInfo( @SuppressWarnings("unchecked") DataStreamInfo(StreamInput in) throws IOException { this( - new DataStream(in), + DataStream.read(in), ClusterHealthStatus.readFrom(in), in.readOptionalString(), in.readOptionalString(), @@ -300,45 +301,8 @@ public XContentBuilder toXContent( .field(DataStream.NAME_FIELD.getPreferredName(), DataStream.TIMESTAMP_FIELD_NAME) .endObject(); - builder.field(DataStream.INDICES_FIELD.getPreferredName()); - if (dataStream.getIndices() == null) { - builder.nullValue(); - } else { - builder.startArray(); - for (Index index : dataStream.getIndices()) { - builder.startObject(); - index.toXContentFragment(builder); - IndexProperties indexProperties = indexSettingsValues.get(index); - if (indexProperties != null) { - builder.field(PREFER_ILM.getPreferredName(), indexProperties.preferIlm()); - if (indexProperties.ilmPolicyName() != null) { - builder.field(ILM_POLICY_FIELD.getPreferredName(), indexProperties.ilmPolicyName()); - } - builder.field(MANAGED_BY.getPreferredName(), indexProperties.managedBy.displayValue); - } - builder.endObject(); - } - builder.endArray(); - } + indicesToXContent(builder, dataStream.getIndices()); builder.field(DataStream.GENERATION_FIELD.getPreferredName(), dataStream.getGeneration()); - if (DataStream.isFailureStoreFeatureFlagEnabled()) { - builder.field(DataStream.FAILURE_INDICES_FIELD.getPreferredName()); - builder.startArray(); - for (Index failureStore : dataStream.getFailureIndices()) { - builder.startObject(); - failureStore.toXContentFragment(builder); - IndexProperties indexProperties = indexSettingsValues.get(failureStore); - if (indexProperties != null) { - builder.field(PREFER_ILM.getPreferredName(), indexProperties.preferIlm()); - if (indexProperties.ilmPolicyName() != null) { - builder.field(ILM_POLICY_FIELD.getPreferredName(), 
indexProperties.ilmPolicyName()); - } - builder.field(MANAGED_BY.getPreferredName(), indexProperties.managedBy.displayValue); - } - builder.endObject(); - } - builder.endArray(); - } if (dataStream.getMetadata() != null) { builder.field(DataStream.METADATA_FIELD.getPreferredName(), dataStream.getMetadata()); } @@ -361,20 +325,7 @@ public XContentBuilder toXContent( builder.field(ALLOW_CUSTOM_ROUTING.getPreferredName(), dataStream.isAllowCustomRouting()); builder.field(REPLICATED.getPreferredName(), dataStream.isReplicated()); builder.field(ROLLOVER_ON_WRITE.getPreferredName(), dataStream.rolloverOnWrite()); - if (DataStream.isFailureStoreFeatureFlagEnabled()) { - builder.field(DataStream.FAILURE_STORE_FIELD.getPreferredName(), dataStream.isFailureStoreEnabled()); - } - if (dataStream.getAutoShardingEvent() != null) { - DataStreamAutoShardingEvent autoShardingEvent = dataStream.getAutoShardingEvent(); - builder.startObject(AUTO_SHARDING_FIELD.getPreferredName()); - autoShardingEvent.toXContent(builder, params); - builder.humanReadableField( - TIME_SINCE_LAST_AUTO_SHARD_EVENT_MILLIS.getPreferredName(), - TIME_SINCE_LAST_AUTO_SHARD_EVENT.getPreferredName(), - autoShardingEvent.getTimeSinceLastAutoShardingEvent(System::currentTimeMillis) - ); - builder.endObject(); - } + addAutoShardingEvent(builder, params, dataStream.getAutoShardingEvent()); if (timeSeries != null) { builder.startObject(TIME_SERIES.getPreferredName()); builder.startArray(TEMPORAL_RANGES.getPreferredName()); @@ -389,10 +340,56 @@ public XContentBuilder toXContent( builder.endArray(); builder.endObject(); } + if (DataStream.isFailureStoreFeatureFlagEnabled()) { + builder.startObject(DataStream.FAILURE_STORE_FIELD.getPreferredName()); + builder.field(FAILURE_STORE_ENABLED.getPreferredName(), dataStream.isFailureStoreEnabled()); + builder.field( + DataStream.ROLLOVER_ON_WRITE_FIELD.getPreferredName(), + dataStream.getFailureIndices().isRolloverOnWrite() + ); + indicesToXContent(builder, 
dataStream.getFailureIndices().getIndices()); + addAutoShardingEvent(builder, params, dataStream.getFailureIndices().getAutoShardingEvent()); + builder.endObject(); + } builder.endObject(); return builder; } + private XContentBuilder indicesToXContent(XContentBuilder builder, List indices) throws IOException { + builder.field(DataStream.INDICES_FIELD.getPreferredName()); + builder.startArray(); + for (Index index : indices) { + builder.startObject(); + index.toXContentFragment(builder); + IndexProperties indexProperties = indexSettingsValues.get(index); + if (indexProperties != null) { + builder.field(PREFER_ILM.getPreferredName(), indexProperties.preferIlm()); + if (indexProperties.ilmPolicyName() != null) { + builder.field(ILM_POLICY_FIELD.getPreferredName(), indexProperties.ilmPolicyName()); + } + builder.field(MANAGED_BY.getPreferredName(), indexProperties.managedBy.displayValue); + } + builder.endObject(); + } + builder.endArray(); + return builder; + } + + private void addAutoShardingEvent(XContentBuilder builder, Params params, DataStreamAutoShardingEvent autoShardingEvent) + throws IOException { + if (autoShardingEvent == null) { + return; + } + builder.startObject(AUTO_SHARDING_FIELD.getPreferredName()); + autoShardingEvent.toXContent(builder, params); + builder.humanReadableField( + TIME_SINCE_LAST_AUTO_SHARD_EVENT_MILLIS.getPreferredName(), + TIME_SINCE_LAST_AUTO_SHARD_EVENT.getPreferredName(), + autoShardingEvent.getTimeSinceLastAutoShardingEvent(System::currentTimeMillis) + ); + builder.endObject(); + } + /** * Computes and returns which system will manage the next generation for this data stream. 
*/ diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index afe918c6853e..794a3f38b56b 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -862,12 +862,12 @@ public Index getConcreteWriteIndex(IndexAbstraction ia, Metadata metadata) { // Resolve write index and get parent data stream to handle the case of dealing with an alias String defaultWriteIndexName = ia.getWriteIndex().getName(); DataStream dataStream = metadata.getIndicesLookup().get(defaultWriteIndexName).getParentDataStream(); - if (dataStream.getFailureIndices().size() < 1) { + if (dataStream.getFailureIndices().getIndices().size() < 1) { throw new ElasticsearchException( "Attempting to write a document to a failure store but the target data stream does not have one enabled" ); } - return dataStream.getFailureIndices().get(dataStream.getFailureIndices().size() - 1); + return dataStream.getFailureIndices().getIndices().get(dataStream.getFailureIndices().getIndices().size() - 1); } else { // Resolve as normal return ia.getWriteIndex(this, metadata); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index e4b47b8d26c6..a67552c30d5a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -22,7 +22,6 @@ import org.elasticsearch.cluster.metadata.DataStreamLifecycle.Downsampling.Round; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ 
-52,7 +51,6 @@ import java.util.Collection; import java.util.Comparator; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -102,7 +100,6 @@ public static boolean isFailureStoreFeatureFlagEnabled() { private final LongSupplier timeProvider; private final String name; - private final List indices; private final long generation; @Nullable private final Map metadata; @@ -114,12 +111,10 @@ public static boolean isFailureStoreFeatureFlagEnabled() { private final IndexMode indexMode; @Nullable private final DataStreamLifecycle lifecycle; - private final boolean rolloverOnWrite; private final boolean failureStoreEnabled; - private final List failureIndices; - private volatile Set failureStoreLookup; - @Nullable - private final DataStreamAutoShardingEvent autoShardingEvent; + + private final DataStreamIndices backingIndices; + private final DataStreamIndices failureIndices; public DataStream( String name, @@ -139,7 +134,6 @@ public DataStream( ) { this( name, - indices, generation, metadata, hidden, @@ -150,16 +144,14 @@ public DataStream( indexMode, lifecycle, failureStoreEnabled, - failureIndices, - rolloverOnWrite, - autoShardingEvent + new DataStreamIndices(BACKING_INDEX_PREFIX, List.copyOf(indices), rolloverOnWrite, autoShardingEvent), + new DataStreamIndices(FAILURE_STORE_PREFIX, List.copyOf(failureIndices), false, null) ); } // visible for testing DataStream( String name, - List indices, long generation, Map metadata, boolean hidden, @@ -170,13 +162,10 @@ public DataStream( IndexMode indexMode, DataStreamLifecycle lifecycle, boolean failureStoreEnabled, - List failureIndices, - boolean rolloverOnWrite, - @Nullable DataStreamAutoShardingEvent autoShardingEvent + DataStreamIndices backingIndices, + DataStreamIndices failureIndices ) { this.name = name; - this.indices = List.copyOf(indices); - assert indices.isEmpty() == false; this.generation = generation; this.metadata = metadata; assert 
system == false || hidden; // system indices must be hidden @@ -188,21 +177,11 @@ public DataStream( this.indexMode = indexMode; this.lifecycle = lifecycle; this.failureStoreEnabled = failureStoreEnabled; + assert backingIndices.indices.isEmpty() == false; + assert replicated == false || (backingIndices.rolloverOnWrite == false && failureIndices.rolloverOnWrite == false) + : "replicated data streams cannot be marked for lazy rollover"; + this.backingIndices = backingIndices; this.failureIndices = failureIndices; - assert assertConsistent(this.indices); - assert replicated == false || rolloverOnWrite == false : "replicated data streams cannot be marked for lazy rollover"; - this.rolloverOnWrite = rolloverOnWrite; - this.autoShardingEvent = autoShardingEvent; - } - - private static boolean assertConsistent(List indices) { - assert indices.size() > 0; - final Set indexNames = new HashSet<>(); - for (Index index : indices) { - final boolean added = indexNames.add(index.getName()); - assert added : "found duplicate index entries in " + indices; - } - return true; } @Override @@ -222,20 +201,16 @@ public boolean isDataStreamRelated() { @Override public List getIndices() { - return indices; + return backingIndices.indices; } public long getGeneration() { return generation; } - public List getFailureIndices() { - return failureIndices; - } - @Override public Index getWriteIndex() { - return indices.get(indices.size() - 1); + return backingIndices.getWriteIndex(); } /** @@ -243,29 +218,18 @@ public Index getWriteIndex() { */ @Nullable public Index getFailureStoreWriteIndex() { - return isFailureStoreEnabled() == false || failureIndices.isEmpty() ? null : failureIndices.get(failureIndices.size() - 1); + return isFailureStoreEnabled() == false || failureIndices.indices.isEmpty() ? null : failureIndices.getWriteIndex(); } /** * Returns true if the index name provided belongs to a failure store index. 
- * This method builds a local Set with all the failure store index names and then checks if it contains the name. - * This will perform better if there are multiple indices of this data stream checked. */ public boolean isFailureStoreIndex(String indexName) { - if (failureStoreLookup == null) { - // There is a chance this will be calculated twice, but it's a relatively cheap action, - // so it's not worth synchronising - if (failureIndices == null || failureIndices.isEmpty()) { - failureStoreLookup = Set.of(); - } else { - failureStoreLookup = failureIndices.stream().map(Index::getName).collect(Collectors.toSet()); - } - } - return failureStoreLookup.contains(indexName); + return failureIndices.containsIndex(indexName); } public boolean rolloverOnWrite() { - return rolloverOnWrite; + return backingIndices.rolloverOnWrite; } /** @@ -275,8 +239,8 @@ public boolean rolloverOnWrite() { * an end time that is less than the provided timestamp. Otherwise null is returned. */ public Index selectTimeSeriesWriteIndex(Instant timestamp, Metadata metadata) { - for (int i = indices.size() - 1; i >= 0; i--) { - Index index = indices.get(i); + for (int i = backingIndices.indices.size() - 1; i >= 0; i--) { + Index index = backingIndices.indices.get(i); IndexMetadata im = metadata.index(index); // TODO: make index_mode, start and end time fields in IndexMetadata class. 
@@ -306,7 +270,7 @@ public Index selectTimeSeriesWriteIndex(Instant timestamp, Metadata metadata) { public void validate(Function imSupplier) { if (indexMode == IndexMode.TIME_SERIES) { // Get a sorted overview of each backing index with there start and end time range: - var startAndEndTimes = indices.stream().map(index -> { + var startAndEndTimes = backingIndices.indices.stream().map(index -> { IndexMetadata im = imSupplier.apply(index.getName()); if (im == null) { throw new IllegalStateException("index [" + index.getName() + "] is not found in the index metadata supplier"); @@ -407,7 +371,19 @@ public DataStreamLifecycle getLifecycle() { * Returns the latest auto sharding event that happened for this data stream */ public DataStreamAutoShardingEvent getAutoShardingEvent() { - return autoShardingEvent; + return backingIndices.autoShardingEvent; + } + + public DataStreamIndices getBackingIndices() { + return backingIndices; + } + + public DataStreamIndices getFailureIndices() { + return failureIndices; + } + + public DataStreamIndices getDataStreamIndices(boolean failureStore) { + return failureStore ? 
this.failureIndices : backingIndices; } /** @@ -446,15 +422,11 @@ public DataStream unsafeRollover(Index writeIndex, long generation, boolean time indexMode = null; } - List backingIndices = new ArrayList<>(indices); + List backingIndices = new ArrayList<>(this.backingIndices.indices); backingIndices.add(writeIndex); - return copy().setIndices(backingIndices) - .setGeneration(generation) - .setReplicated(false) - .setIndexMode(indexMode) - .setAutoShardingEvent(autoShardingEvent) - .setRolloverOnWrite(false) - .build(); + return copy().setBackingIndices( + this.backingIndices.copy().setIndices(backingIndices).setAutoShardingEvent(autoShardingEvent).setRolloverOnWrite(false).build() + ).setGeneration(generation).setIndexMode(indexMode).build(); } /** @@ -475,56 +447,32 @@ public DataStream rolloverFailureStore(Index writeIndex, long generation) { * Like {@link #rolloverFailureStore(Index, long)}, but does no validation, use with care only. */ public DataStream unsafeRolloverFailureStore(Index writeIndex, long generation) { - List failureIndices = new ArrayList<>(this.failureIndices); + List failureIndices = new ArrayList<>(this.failureIndices.indices); failureIndices.add(writeIndex); - return copy().setGeneration(generation).setReplicated(false).setFailureIndices(failureIndices).build(); + return copy().setGeneration(generation).setFailureIndices(this.failureIndices.copy().setIndices(failureIndices).build()).build(); } /** * Generates the next write index name and generation to be used for rolling over this data stream. * * @param clusterMetadata Cluster metadata + * @param dataStreamIndices The data stream indices that we're generating the next write index name and generation for * @return tuple of the next write index name and next generation. 
*/ - public Tuple nextWriteIndexAndGeneration(Metadata clusterMetadata) { - ensureNotReplicated(); - return unsafeNextWriteIndexAndGeneration(clusterMetadata); - } - - /** - * Like {@link #nextWriteIndexAndGeneration(Metadata)}, but does no validation, use with care only. - */ - public Tuple unsafeNextWriteIndexAndGeneration(Metadata clusterMetadata) { - return generateNextWriteIndexAndGeneration(clusterMetadata, DataStream::getDefaultBackingIndexName); - } - - /** - * Generates the next write index name and generation to be used for rolling over the failure store of this data stream. - * - * @param clusterMetadata Cluster metadata - * @return tuple of the next failure store write index name and next generation. - */ - public Tuple nextFailureStoreWriteIndexAndGeneration(Metadata clusterMetadata) { + public Tuple nextWriteIndexAndGeneration(Metadata clusterMetadata, DataStreamIndices dataStreamIndices) { ensureNotReplicated(); - return unsafeNextFailureStoreWriteIndexAndGeneration(clusterMetadata); + return unsafeNextWriteIndexAndGeneration(clusterMetadata, dataStreamIndices); } /** - * Like {@link #nextFailureStoreWriteIndexAndGeneration(Metadata)}, but does no validation, use with care only. + * Like {@link #nextWriteIndexAndGeneration(Metadata, DataStreamIndices)}, but does no validation, use with care only. 
*/ - public Tuple unsafeNextFailureStoreWriteIndexAndGeneration(Metadata clusterMetadata) { - return generateNextWriteIndexAndGeneration(clusterMetadata, DataStream::getDefaultFailureStoreName); - } - - private Tuple generateNextWriteIndexAndGeneration( - Metadata clusterMetadata, - TriFunction nameGenerator - ) { + public Tuple unsafeNextWriteIndexAndGeneration(Metadata clusterMetadata, DataStreamIndices dataStreamIndices) { String newWriteIndexName; long generation = this.generation; long currentTimeMillis = timeProvider.getAsLong(); do { - newWriteIndexName = nameGenerator.apply(getName(), ++generation, currentTimeMillis); + newWriteIndexName = dataStreamIndices.generateName(name, ++generation, currentTimeMillis); } while (clusterMetadata.hasIndexAbstraction(newWriteIndexName)); return Tuple.tuple(newWriteIndexName, generation); } @@ -544,14 +492,14 @@ private void ensureNotReplicated() { * @throws IllegalArgumentException if {@code index} is not a backing index or is the current write index of the data stream */ public DataStream removeBackingIndex(Index index) { - int backingIndexPosition = indices.indexOf(index); + int backingIndexPosition = backingIndices.indices.indexOf(index); if (backingIndexPosition == -1) { throw new IllegalArgumentException( String.format(Locale.ROOT, "index [%s] is not part of data stream [%s]", index.getName(), name) ); } - if (indices.size() == (backingIndexPosition + 1)) { + if (backingIndices.indices.size() == (backingIndexPosition + 1)) { throw new IllegalArgumentException( String.format( Locale.ROOT, @@ -562,10 +510,12 @@ public DataStream removeBackingIndex(Index index) { ); } - List backingIndices = new ArrayList<>(indices); + List backingIndices = new ArrayList<>(this.backingIndices.indices); backingIndices.remove(index); - assert backingIndices.size() == indices.size() - 1; - return copy().setIndices(backingIndices).setGeneration(generation + 1).build(); + assert backingIndices.size() == this.backingIndices.indices.size() - 
1; + return copy().setBackingIndices(this.backingIndices.copy().setIndices(backingIndices).build()) + .setGeneration(generation + 1) + .build(); } /** @@ -578,7 +528,7 @@ public DataStream removeBackingIndex(Index index) { * data stream */ public DataStream removeFailureStoreIndex(Index index) { - int failureIndexPosition = failureIndices.indexOf(index); + int failureIndexPosition = failureIndices.indices.indexOf(index); if (failureIndexPosition == -1) { throw new IllegalArgumentException( @@ -588,7 +538,7 @@ public DataStream removeFailureStoreIndex(Index index) { // TODO: When failure stores are lazily created, this wont necessarily be required anymore. We can remove the failure store write // index as long as we mark the data stream to lazily rollover the failure store with no conditions on its next write - if (failureIndices.size() == (failureIndexPosition + 1)) { + if (failureIndices.indices.size() == (failureIndexPosition + 1)) { throw new IllegalArgumentException( String.format( Locale.ROOT, @@ -599,10 +549,12 @@ public DataStream removeFailureStoreIndex(Index index) { ); } - List updatedFailureIndices = new ArrayList<>(failureIndices); + List updatedFailureIndices = new ArrayList<>(failureIndices.indices); updatedFailureIndices.remove(index); - assert updatedFailureIndices.size() == failureIndices.size() - 1; - return copy().setGeneration(generation + 1).setFailureIndices(updatedFailureIndices).build(); + assert updatedFailureIndices.size() == failureIndices.indices.size() - 1; + return copy().setFailureIndices(failureIndices.copy().setIndices(updatedFailureIndices).build()) + .setGeneration(generation + 1) + .build(); } /** @@ -616,14 +568,14 @@ public DataStream removeFailureStoreIndex(Index index) { * existing index. 
*/ public DataStream replaceBackingIndex(Index existingBackingIndex, Index newBackingIndex) { - List backingIndices = new ArrayList<>(indices); + List backingIndices = new ArrayList<>(this.backingIndices.indices); int backingIndexPosition = backingIndices.indexOf(existingBackingIndex); if (backingIndexPosition == -1) { throw new IllegalArgumentException( String.format(Locale.ROOT, "index [%s] is not part of data stream [%s]", existingBackingIndex.getName(), name) ); } - if (indices.size() == (backingIndexPosition + 1)) { + if (this.backingIndices.indices.size() == (backingIndexPosition + 1)) { throw new IllegalArgumentException( String.format( Locale.ROOT, @@ -634,7 +586,9 @@ public DataStream replaceBackingIndex(Index existingBackingIndex, Index newBacki ); } backingIndices.set(backingIndexPosition, newBackingIndex); - return copy().setIndices(backingIndices).setGeneration(generation + 1).build(); + return copy().setBackingIndices(this.backingIndices.copy().setIndices(backingIndices).build()) + .setGeneration(generation + 1) + .build(); } /** @@ -656,10 +610,12 @@ public DataStream addBackingIndex(Metadata clusterMetadata, Index index) { // ensure that no aliases reference index ensureNoAliasesOnIndex(clusterMetadata, index); - List backingIndices = new ArrayList<>(indices); + List backingIndices = new ArrayList<>(this.backingIndices.indices); backingIndices.add(0, index); - assert backingIndices.size() == indices.size() + 1; - return copy().setIndices(backingIndices).setGeneration(generation + 1).build(); + assert backingIndices.size() == this.backingIndices.indices.size() + 1; + return copy().setBackingIndices(this.backingIndices.copy().setIndices(backingIndices).build()) + .setGeneration(generation + 1) + .build(); } /** @@ -680,10 +636,12 @@ public DataStream addFailureStoreIndex(Metadata clusterMetadata, Index index) { ensureNoAliasesOnIndex(clusterMetadata, index); - List updatedFailureIndices = new ArrayList<>(failureIndices); + List updatedFailureIndices = 
new ArrayList<>(failureIndices.indices); updatedFailureIndices.add(0, index); - assert updatedFailureIndices.size() == failureIndices.size() + 1; - return copy().setGeneration(generation + 1).setFailureIndices(updatedFailureIndices).build(); + assert updatedFailureIndices.size() == failureIndices.indices.size() + 1; + return copy().setFailureIndices(failureIndices.copy().setIndices(updatedFailureIndices).build()) + .setGeneration(generation + 1) + .build(); } /** @@ -742,7 +700,7 @@ public DataStream promoteDataStream() { @Nullable public DataStream snapshot(Collection indicesInSnapshot) { // do not include indices not available in the snapshot - List reconciledIndices = new ArrayList<>(this.indices); + List reconciledIndices = new ArrayList<>(this.backingIndices.indices); if (reconciledIndices.removeIf(x -> indicesInSnapshot.contains(x.getName()) == false) == false) { return this; } @@ -751,7 +709,9 @@ public DataStream snapshot(Collection indicesInSnapshot) { return null; } - return copy().setIndices(reconciledIndices).setMetadata(metadata == null ? null : new HashMap<>(metadata)).build(); + return copy().setBackingIndices(backingIndices.copy().setIndices(reconciledIndices).build()) + .setMetadata(metadata == null ? 
null : new HashMap<>(metadata)) + .build(); } /** @@ -792,7 +752,7 @@ public List getDownsamplingRoundsFor( Function indexMetadataSupplier, LongSupplier nowSupplier ) { - assert indices.contains(index) : "the provided index must be a backing index for this datastream"; + assert backingIndices.indices.contains(index) : "the provided index must be a backing index for this datastream"; if (lifecycle == null || lifecycle.getDownsamplingRounds() == null) { return List.of(); } @@ -831,7 +791,7 @@ public List getNonWriteIndicesOlderThan( LongSupplier nowSupplier ) { List olderIndices = new ArrayList<>(); - for (Index index : indices) { + for (Index index : backingIndices.indices) { if (isIndexOderThan(index, retentionPeriod.getMillis(), nowSupplier.getAsLong(), indicesPredicate, indexMetadataSupplier)) { olderIndices.add(index); } @@ -864,7 +824,7 @@ private boolean isIndexOderThan( * we return false. */ public boolean isIndexManagedByDataStreamLifecycle(Index index, Function indexMetadataSupplier) { - if (indices.contains(index) == false) { + if (backingIndices.indices.contains(index) == false) { return false; } IndexMetadata indexMetadata = indexMetadataSupplier.apply(index.getName()); @@ -936,13 +896,7 @@ public static String getDefaultBackingIndexName(String dataStreamName, long gene * @return backing index name */ public static String getDefaultBackingIndexName(String dataStreamName, long generation, long epochMillis) { - return String.format( - Locale.ROOT, - BACKING_INDEX_PREFIX + "%s-%s-%06d", - dataStreamName, - DATE_FORMATTER.formatMillis(epochMillis), - generation - ); + return getDefaultIndexName(BACKING_INDEX_PREFIX, dataStreamName, generation, epochMillis); } /** @@ -955,33 +909,65 @@ public static String getDefaultBackingIndexName(String dataStreamName, long gene * @return backing index name */ public static String getDefaultFailureStoreName(String dataStreamName, long generation, long epochMillis) { - return String.format( - Locale.ROOT, - 
FAILURE_STORE_PREFIX + "%s-%s-%06d", - dataStreamName, - DATE_FORMATTER.formatMillis(epochMillis), - generation - ); + return getDefaultIndexName(FAILURE_STORE_PREFIX, dataStreamName, generation, epochMillis); } - public DataStream(StreamInput in) throws IOException { - this( - readName(in), - readIndices(in), - in.readVLong(), - in.readGenericMap(), - in.readBoolean(), - in.readBoolean(), - in.readBoolean(), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0) ? in.readBoolean() : false, - in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0) ? in.readOptionalEnum(IndexMode.class) : null, - in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) ? in.readOptionalWriteable(DataStreamLifecycle::new) : null, - in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) ? in.readBoolean() : false, - in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) ? readIndices(in) : List.of(), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? in.readBoolean() : false, - in.getTransportVersion().onOrAfter(DataStream.ADDED_AUTO_SHARDING_EVENT_VERSION) - ? in.readOptionalWriteable(DataStreamAutoShardingEvent::new) - : null + /** + * Generates the name of the index that conforms to the default naming convention for indices + * on data streams given the specified prefix, data stream name, generation, and time. 
+ * + * @param prefix the prefix that the index name should have + * @param dataStreamName name of the data stream + * @param generation generation of the data stream + * @param epochMillis creation time for the backing index + * @return backing index name + */ + private static String getDefaultIndexName(String prefix, String dataStreamName, long generation, long epochMillis) { + return String.format(Locale.ROOT, prefix + "%s-%s-%06d", dataStreamName, DATE_FORMATTER.formatMillis(epochMillis), generation); + } + + public static DataStream read(StreamInput in) throws IOException { + var name = readName(in); + var backingIndicesBuilder = DataStreamIndices.backingIndicesBuilder(readIndices(in)); + var generation = in.readVLong(); + var metadata = in.readGenericMap(); + var hidden = in.readBoolean(); + var replicated = in.readBoolean(); + var system = in.readBoolean(); + var allowCustomRouting = in.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0) ? in.readBoolean() : false; + var indexMode = in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0) ? in.readOptionalEnum(IndexMode.class) : null; + var lifecycle = in.getTransportVersion().onOrAfter(TransportVersions.V_8_9_X) + ? in.readOptionalWriteable(DataStreamLifecycle::new) + : null; + var failureStoreEnabled = in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) + ? in.readBoolean() + : false; + var failureIndices = in.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION) + ? readIndices(in) + : List.of(); + var failureIndicesBuilder = DataStreamIndices.failureIndicesBuilder(failureIndices); + backingIndicesBuilder.setRolloverOnWrite(in.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0) ? 
in.readBoolean() : false); + if (in.getTransportVersion().onOrAfter(DataStream.ADDED_AUTO_SHARDING_EVENT_VERSION)) { + backingIndicesBuilder.setAutoShardingEvent(in.readOptionalWriteable(DataStreamAutoShardingEvent::new)); + } + if (in.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_FIELD_PARITY)) { + failureIndicesBuilder.setRolloverOnWrite(in.readBoolean()) + .setAutoShardingEvent(in.readOptionalWriteable(DataStreamAutoShardingEvent::new)); + } + return new DataStream( + name, + generation, + metadata, + hidden, + replicated, + system, + System::currentTimeMillis, + allowCustomRouting, + indexMode, + lifecycle, + failureStoreEnabled, + backingIndicesBuilder.build(), + failureIndicesBuilder.build() ); } @@ -996,14 +982,14 @@ static List readIndices(StreamInput in) throws IOException { } public static Diff readDiffFrom(StreamInput in) throws IOException { - return SimpleDiffable.readDiffFrom(DataStream::new, in); + return SimpleDiffable.readDiffFrom(DataStream::read, in); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(name); out.writeString(TIMESTAMP_FIELD_NAME); // TODO: clear this out in the future https://github.com/elastic/elasticsearch/issues/101991 - out.writeCollection(indices); + out.writeCollection(backingIndices.indices); out.writeVLong(generation); out.writeGenericMap(metadata); out.writeBoolean(hidden); @@ -1020,13 +1006,17 @@ public void writeTo(StreamOutput out) throws IOException { } if (out.getTransportVersion().onOrAfter(DataStream.ADDED_FAILURE_STORE_TRANSPORT_VERSION)) { out.writeBoolean(failureStoreEnabled); - out.writeCollection(failureIndices); + out.writeCollection(failureIndices.indices); } if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_13_0)) { - out.writeBoolean(rolloverOnWrite); + out.writeBoolean(backingIndices.rolloverOnWrite); } if (out.getTransportVersion().onOrAfter(DataStream.ADDED_AUTO_SHARDING_EVENT_VERSION)) { - out.writeOptionalWriteable(autoShardingEvent); + 
out.writeOptionalWriteable(backingIndices.autoShardingEvent); + } + if (out.getTransportVersion().onOrAfter(TransportVersions.FAILURE_STORE_FIELD_PARITY)) { + out.writeBoolean(failureIndices.rolloverOnWrite); + out.writeOptionalWriteable(failureIndices.autoShardingEvent); } } @@ -1045,30 +1035,41 @@ public void writeTo(StreamOutput out) throws IOException { public static final ParseField FAILURE_INDICES_FIELD = new ParseField("failure_indices"); public static final ParseField ROLLOVER_ON_WRITE_FIELD = new ParseField("rollover_on_write"); public static final ParseField AUTO_SHARDING_FIELD = new ParseField("auto_sharding"); + public static final ParseField FAILURE_ROLLOVER_ON_WRITE_FIELD = new ParseField("failure_rollover_on_write"); + public static final ParseField FAILURE_AUTO_SHARDING_FIELD = new ParseField("failure_auto_sharding"); @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("data_stream", args -> { // Fields behind a feature flag need to be parsed last otherwise the parser will fail when the feature flag is disabled. // Until the feature flag is removed we keep them separately to be mindful of this. boolean failureStoreEnabled = DataStream.isFailureStoreFeatureFlagEnabled() && args[12] != null && (boolean) args[12]; - List failureStoreIndices = DataStream.isFailureStoreFeatureFlagEnabled() && args[13] != null - ? (List) args[13] - : List.of(); + DataStreamIndices failureIndices = DataStream.isFailureStoreFeatureFlagEnabled() + ? new DataStreamIndices( + FAILURE_STORE_PREFIX, + args[13] != null ? 
(List) args[13] : List.of(), + args[14] != null && (boolean) args[14], + (DataStreamAutoShardingEvent) args[15] + ) + : new DataStreamIndices(FAILURE_STORE_PREFIX, List.of(), false, null); return new DataStream( (String) args[0], - (List) args[1], (Long) args[2], (Map) args[3], args[4] != null && (boolean) args[4], args[5] != null && (boolean) args[5], args[6] != null && (boolean) args[6], + System::currentTimeMillis, args[7] != null && (boolean) args[7], args[8] != null ? IndexMode.fromString((String) args[8]) : null, (DataStreamLifecycle) args[9], failureStoreEnabled, - failureStoreIndices, - args[10] != null && (boolean) args[10], - (DataStreamAutoShardingEvent) args[11] + new DataStreamIndices( + BACKING_INDEX_PREFIX, + (List) args[1], + args[10] != null && (boolean) args[10], + (DataStreamAutoShardingEvent) args[11] + ), + failureIndices ); }); @@ -1105,6 +1106,12 @@ public void writeTo(StreamOutput out) throws IOException { (p, c) -> Index.fromXContent(p), FAILURE_INDICES_FIELD ); + PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), FAILURE_ROLLOVER_ON_WRITE_FIELD); + PARSER.declareObject( + ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> DataStreamAutoShardingEvent.fromXContent(p), + FAILURE_AUTO_SHARDING_FIELD + ); } } @@ -1132,11 +1139,8 @@ public XContentBuilder toXContent( .startObject() .field(NAME_FIELD.getPreferredName(), TIMESTAMP_FIELD_NAME) .endObject(); - builder.xContentList(INDICES_FIELD.getPreferredName(), indices); + builder.xContentList(INDICES_FIELD.getPreferredName(), backingIndices.indices); builder.field(GENERATION_FIELD.getPreferredName(), generation); - if (DataStream.isFailureStoreFeatureFlagEnabled() && failureIndices.isEmpty() == false) { - builder.xContentList(FAILURE_INDICES_FIELD.getPreferredName(), failureIndices); - } if (metadata != null) { builder.field(METADATA_FIELD.getPreferredName(), metadata); } @@ -1146,6 +1150,15 @@ public XContentBuilder toXContent( 
builder.field(ALLOW_CUSTOM_ROUTING.getPreferredName(), allowCustomRouting); if (DataStream.isFailureStoreFeatureFlagEnabled()) { builder.field(FAILURE_STORE_FIELD.getPreferredName(), failureStoreEnabled); + if (failureIndices.indices.isEmpty() == false) { + builder.xContentList(FAILURE_INDICES_FIELD.getPreferredName(), failureIndices.indices); + } + builder.field(FAILURE_ROLLOVER_ON_WRITE_FIELD.getPreferredName(), failureIndices.rolloverOnWrite); + if (failureIndices.autoShardingEvent != null) { + builder.startObject(FAILURE_AUTO_SHARDING_FIELD.getPreferredName()); + failureIndices.autoShardingEvent.toXContent(builder, params); + builder.endObject(); + } } if (indexMode != null) { builder.field(INDEX_MODE.getPreferredName(), indexMode); @@ -1154,10 +1167,10 @@ public XContentBuilder toXContent( builder.field(LIFECYCLE.getPreferredName()); lifecycle.toXContent(builder, params, rolloverConfiguration, isSystem() ? null : globalRetention); } - builder.field(ROLLOVER_ON_WRITE_FIELD.getPreferredName(), rolloverOnWrite); - if (autoShardingEvent != null) { + builder.field(ROLLOVER_ON_WRITE_FIELD.getPreferredName(), backingIndices.rolloverOnWrite); + if (backingIndices.autoShardingEvent != null) { builder.startObject(AUTO_SHARDING_FIELD.getPreferredName()); - autoShardingEvent.toXContent(builder, params); + backingIndices.autoShardingEvent.toXContent(builder, params); builder.endObject(); } builder.endObject(); @@ -1170,7 +1183,6 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; DataStream that = (DataStream) o; return name.equals(that.name) - && indices.equals(that.indices) && generation == that.generation && Objects.equals(metadata, that.metadata) && hidden == that.hidden @@ -1180,16 +1192,14 @@ public boolean equals(Object o) { && indexMode == that.indexMode && Objects.equals(lifecycle, that.lifecycle) && failureStoreEnabled == that.failureStoreEnabled - && failureIndices.equals(that.failureIndices) - && rolloverOnWrite == 
that.rolloverOnWrite - && Objects.equals(autoShardingEvent, that.autoShardingEvent); + && Objects.equals(backingIndices, that.backingIndices) + && Objects.equals(failureIndices, that.failureIndices); } @Override public int hashCode() { return Objects.hash( name, - indices, generation, metadata, hidden, @@ -1199,9 +1209,8 @@ public int hashCode() { indexMode, lifecycle, failureStoreEnabled, - failureIndices, - rolloverOnWrite, - autoShardingEvent + backingIndices, + failureIndices ); } @@ -1345,14 +1354,143 @@ public static Builder builder(String name, List indices) { return new Builder(name, indices); } + public static Builder builder(String name, DataStreamIndices backingIndices) { + return new Builder(name, backingIndices); + } + public Builder copy() { return new Builder(this); } + public static class DataStreamIndices { + private final String namePrefix; + private final List indices; + private final boolean rolloverOnWrite; + @Nullable + private final DataStreamAutoShardingEvent autoShardingEvent; + private Set lookup; + + protected DataStreamIndices( + String namePrefix, + List indices, + boolean rolloverOnWrite, + DataStreamAutoShardingEvent autoShardingEvent + ) { + this.namePrefix = namePrefix; + // The list of indices is expected to be an immutable list. We don't create an immutable copy here, as it might have + // impact on the performance on some usages. 
+ this.indices = indices; + this.rolloverOnWrite = rolloverOnWrite; + this.autoShardingEvent = autoShardingEvent; + + assert getLookup().size() == indices.size() : "found duplicate index entries in " + indices; + } + + private Set getLookup() { + if (lookup == null) { + lookup = indices.stream().map(Index::getName).collect(Collectors.toSet()); + } + return lookup; + } + + public Index getWriteIndex() { + return indices.get(indices.size() - 1); + } + + public boolean containsIndex(String index) { + return getLookup().contains(index); + } + + private String generateName(String dataStreamName, long generation, long epochMillis) { + return getDefaultIndexName(namePrefix, dataStreamName, generation, epochMillis); + } + + public static Builder backingIndicesBuilder(List indices) { + return new Builder(BACKING_INDEX_PREFIX, indices); + } + + public static Builder failureIndicesBuilder(List indices) { + return new Builder(FAILURE_STORE_PREFIX, indices); + } + + public Builder copy() { + return new Builder(this); + } + + public List getIndices() { + return indices; + } + + public boolean isRolloverOnWrite() { + return rolloverOnWrite; + } + + public DataStreamAutoShardingEvent getAutoShardingEvent() { + return autoShardingEvent; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DataStreamIndices that = (DataStreamIndices) o; + return rolloverOnWrite == that.rolloverOnWrite + && Objects.equals(namePrefix, that.namePrefix) + && Objects.equals(indices, that.indices) + && Objects.equals(autoShardingEvent, that.autoShardingEvent); + } + + @Override + public int hashCode() { + return Objects.hash(namePrefix, indices, rolloverOnWrite, autoShardingEvent); + } + + public static class Builder { + private final String namePrefix; + private List indices; + private boolean rolloverOnWrite = false; + @Nullable + private DataStreamAutoShardingEvent autoShardingEvent = null; + + private 
Builder(String namePrefix, List indices) { + this.namePrefix = namePrefix; + this.indices = indices; + } + + private Builder(DataStreamIndices dataStreamIndices) { + this.namePrefix = dataStreamIndices.namePrefix; + this.indices = dataStreamIndices.indices; + this.rolloverOnWrite = dataStreamIndices.rolloverOnWrite; + this.autoShardingEvent = dataStreamIndices.autoShardingEvent; + } + + /** + * Set the list of indices. We always create an immutable copy as that's what the constructor expects. + */ + public Builder setIndices(List indices) { + this.indices = List.copyOf(indices); + return this; + } + + public Builder setRolloverOnWrite(boolean rolloverOnWrite) { + this.rolloverOnWrite = rolloverOnWrite; + return this; + } + + public Builder setAutoShardingEvent(DataStreamAutoShardingEvent autoShardingEvent) { + this.autoShardingEvent = autoShardingEvent; + return this; + } + + public DataStreamIndices build() { + return new DataStreamIndices(namePrefix, indices, rolloverOnWrite, autoShardingEvent); + } + } + } + public static class Builder { private LongSupplier timeProvider = System::currentTimeMillis; private String name; - private List indices; private long generation = 1; @Nullable private Map metadata = null; @@ -1364,22 +1502,23 @@ public static class Builder { private IndexMode indexMode = null; @Nullable private DataStreamLifecycle lifecycle = null; - private boolean rolloverOnWrite = false; private boolean failureStoreEnabled = false; - private List failureIndices = List.of(); - @Nullable - private DataStreamAutoShardingEvent autoShardingEvent = null; + private DataStreamIndices backingIndices; + private DataStreamIndices failureIndices = DataStreamIndices.failureIndicesBuilder(List.of()).build(); - public Builder(String name, List indices) { + private Builder(String name, List indices) { + this(name, DataStreamIndices.backingIndicesBuilder(indices).build()); + } + + private Builder(String name, DataStreamIndices backingIndices) { this.name = name; - assert 
indices.isEmpty() == false : "Cannot create data stream with empty backing indices"; - this.indices = indices; + assert backingIndices.indices.isEmpty() == false : "Cannot create data stream with empty backing indices"; + this.backingIndices = backingIndices; } - public Builder(DataStream dataStream) { + private Builder(DataStream dataStream) { timeProvider = dataStream.timeProvider; name = dataStream.name; - indices = dataStream.indices; generation = dataStream.generation; metadata = dataStream.metadata; hidden = dataStream.hidden; @@ -1388,10 +1527,9 @@ public Builder(DataStream dataStream) { allowCustomRouting = dataStream.allowCustomRouting; indexMode = dataStream.indexMode; lifecycle = dataStream.lifecycle; - rolloverOnWrite = dataStream.rolloverOnWrite; failureStoreEnabled = dataStream.failureStoreEnabled; + backingIndices = dataStream.backingIndices; failureIndices = dataStream.failureIndices; - autoShardingEvent = dataStream.autoShardingEvent; } public Builder setTimeProvider(LongSupplier timeProvider) { @@ -1404,12 +1542,6 @@ public Builder setName(String name) { return this; } - public Builder setIndices(List indices) { - assert indices.isEmpty() == false : "Cannot create data stream with empty backing indices"; - this.indices = indices; - return this; - } - public Builder setGeneration(long generation) { this.generation = generation; return this; @@ -1450,30 +1582,34 @@ public Builder setLifecycle(DataStreamLifecycle lifecycle) { return this; } - public Builder setRolloverOnWrite(boolean rolloverOnWrite) { - this.rolloverOnWrite = rolloverOnWrite; + public Builder setFailureStoreEnabled(boolean failureStoreEnabled) { + this.failureStoreEnabled = failureStoreEnabled; return this; } - public Builder setFailureStoreEnabled(boolean failureStoreEnabled) { - this.failureStoreEnabled = failureStoreEnabled; + public Builder setBackingIndices(DataStreamIndices backingIndices) { + assert backingIndices.indices.isEmpty() == false : "Cannot create data stream with 
empty backing indices"; + this.backingIndices = backingIndices; return this; } - public Builder setFailureIndices(List failureIndices) { + public Builder setFailureIndices(DataStreamIndices failureIndices) { this.failureIndices = failureIndices; return this; } - public Builder setAutoShardingEvent(DataStreamAutoShardingEvent autoShardingEvent) { - this.autoShardingEvent = autoShardingEvent; + public Builder setDataStreamIndices(boolean targetFailureStore, DataStreamIndices indices) { + if (targetFailureStore) { + setFailureIndices(indices); + } else { + setBackingIndices(indices); + } return this; } public DataStream build() { return new DataStream( name, - indices, generation, metadata, hidden, @@ -1484,9 +1620,8 @@ public DataStream build() { indexMode, lifecycle, failureStoreEnabled, - failureIndices, - rolloverOnWrite, - autoShardingEvent + backingIndices, + failureIndices ); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java index fef9ebe993a4..c65f83eca0aa 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamMetadata.java @@ -89,7 +89,7 @@ public DataStreamMetadata( public DataStreamMetadata(StreamInput in) throws IOException { this( - in.readImmutableOpenMap(StreamInput::readString, DataStream::new), + in.readImmutableOpenMap(StreamInput::readString, DataStream::read), in.readImmutableOpenMap(StreamInput::readString, DataStreamAlias::new) ); } @@ -265,7 +265,7 @@ public String toString() { static class DataStreamMetadataDiff implements NamedDiff { private static final DiffableUtils.DiffableValueReader DS_DIFF_READER = new DiffableUtils.DiffableValueReader<>( - DataStream::new, + DataStream::read, DataStream::readDiffFrom ); diff --git 
a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index effc89d8e535..8bc8f9d96bf2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -425,7 +425,7 @@ private static void resolveIndicesForDataStream(Context context, DataStream data if (shouldIncludeFailureIndices(context.getOptions(), dataStream)) { // We short-circuit here, if failure indices are not allowed and they can be skipped if (context.getOptions().allowFailureIndices() || context.getOptions().ignoreUnavailable() == false) { - for (Index index : dataStream.getFailureIndices()) { + for (Index index : dataStream.getFailureIndices().getIndices()) { if (shouldTrackConcreteIndex(context, context.getOptions(), index)) { concreteIndicesResult.add(index); } @@ -470,7 +470,7 @@ private static boolean resolvesToMoreThanOneIndex(IndexAbstraction indexAbstract count += dataStream.getIndices().size(); } if (shouldIncludeFailureIndices(context.getOptions(), dataStream)) { - count += dataStream.getFailureIndices().size(); + count += dataStream.getFailureIndices().getIndices().size(); } return count > 1; } @@ -1431,7 +1431,7 @@ && shouldIncludeFailureIndices(context.getOptions(), (DataStream) indexAbstracti DataStream dataStream = (DataStream) indexAbstraction; indicesStateStream = Stream.concat( indicesStateStream, - dataStream.getFailureIndices().stream().map(context.state.metadata()::index) + dataStream.getFailureIndices().getIndices().stream().map(context.state.metadata()::index) ); } if (excludeState != null) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index fec209960597..e25c12d0c2ad 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -2600,7 +2600,10 @@ private static boolean assertContainsIndexIfDataStream(DataStream parent, IndexM || parent.getIndices().stream().anyMatch(index -> indexMetadata.getIndex().getName().equals(index.getName())) || (DataStream.isFailureStoreFeatureFlagEnabled() && parent.isFailureStoreEnabled() - && parent.getFailureIndices().stream().anyMatch(index -> indexMetadata.getIndex().getName().equals(index.getName()))) + && parent.getFailureIndices() + .getIndices() + .stream() + .anyMatch(index -> indexMetadata.getIndex().getName().equals(index.getName()))) : "Expected data stream [" + parent.getName() + "] to contain index " + indexMetadata.getIndex(); return true; } @@ -2623,7 +2626,7 @@ private static void collectDataStreams( indexToDataStreamLookup.put(i.getName(), dataStream); } if (DataStream.isFailureStoreFeatureFlagEnabled() && dataStream.isFailureStoreEnabled()) { - for (Index i : dataStream.getFailureIndices()) { + for (Index i : dataStream.getFailureIndices().getIndices()) { indexToDataStreamLookup.put(i.getName(), dataStream); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index 2d1d38ac926d..1062f741cf0b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -111,8 +111,8 @@ public ClusterState execute(ClusterState currentState) throws Exception { ); DataStream createdDataStream = clusterState.metadata().dataStreams().get(request.name); firstBackingIndexRef.set(createdDataStream.getIndices().get(0).getName()); - if (createdDataStream.getFailureIndices().isEmpty() == false) { - 
firstFailureStoreRef.set(createdDataStream.getFailureIndices().get(0).getName()); + if (createdDataStream.getFailureIndices().getIndices().isEmpty() == false) { + firstFailureStoreRef.set(createdDataStream.getFailureIndices().getIndices().get(0).getName()); } return clusterState; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java index a018f3d93a9b..7363e71d65c7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java @@ -77,7 +77,12 @@ public Tuple executeTask( ClusterState clusterState ) { return new Tuple<>( - setRolloverOnWrite(clusterState, setRolloverOnWriteTask.getDataStreamName(), setRolloverOnWriteTask.rolloverOnWrite()), + setRolloverOnWrite( + clusterState, + setRolloverOnWriteTask.getDataStreamName(), + setRolloverOnWriteTask.rolloverOnWrite(), + setRolloverOnWriteTask.targetFailureStore() + ), setRolloverOnWriteTask ); } @@ -152,13 +157,14 @@ private void submitUnbatchedTask(@SuppressWarnings("SameParameterValue") String public void setRolloverOnWrite( String dataStreamName, boolean rolloverOnWrite, + boolean targetFailureStore, TimeValue ackTimeout, TimeValue masterTimeout, ActionListener listener ) { setRolloverOnWriteTaskQueue.submitTask( "set-rollover-on-write", - new SetRolloverOnWriteTask(dataStreamName, rolloverOnWrite, ackTimeout, listener), + new SetRolloverOnWriteTask(dataStreamName, rolloverOnWrite, targetFailureStore, ackTimeout, listener), masterTimeout ); } @@ -230,16 +236,25 @@ ClusterState updateDataLifecycle(ClusterState currentState, List dataStr * @param currentState the initial cluster state * @param dataStreamName the name of the data stream to be updated * @param rolloverOnWrite the value of the flag + * @param targetFailureStore whether this rollover 
targets the failure store or the backing indices * @return the updated cluster state */ - public static ClusterState setRolloverOnWrite(ClusterState currentState, String dataStreamName, boolean rolloverOnWrite) { + public static ClusterState setRolloverOnWrite( + ClusterState currentState, + String dataStreamName, + boolean rolloverOnWrite, + boolean targetFailureStore + ) { Metadata metadata = currentState.metadata(); var dataStream = validateDataStream(metadata, dataStreamName); - if (dataStream.rolloverOnWrite() == rolloverOnWrite) { + var indices = dataStream.getDataStreamIndices(targetFailureStore); + if (indices.isRolloverOnWrite() == rolloverOnWrite) { return currentState; } Metadata.Builder builder = Metadata.builder(metadata); - builder.put(dataStream.copy().setRolloverOnWrite(rolloverOnWrite).build()); + builder.put( + dataStream.copy().setDataStreamIndices(targetFailureStore, indices.copy().setRolloverOnWrite(rolloverOnWrite).build()).build() + ); return ClusterState.builder(currentState).metadata(builder.build()).build(); } @@ -286,7 +301,7 @@ private static void removeBackingIndex( ) { boolean indexNotRemoved = true; DataStream dataStream = validateDataStream(metadata, dataStreamName); - List targetIndices = failureStore ? dataStream.getFailureIndices() : dataStream.getIndices(); + List targetIndices = failureStore ? 
dataStream.getFailureIndices().getIndices() : dataStream.getIndices(); for (Index backingIndex : targetIndices) { if (backingIndex.getName().equals(indexName)) { if (failureStore) { @@ -365,16 +380,19 @@ static class SetRolloverOnWriteTask extends AckedBatchedClusterStateUpdateTask { private final String dataStreamName; private final boolean rolloverOnWrite; + private final boolean targetFailureStore; SetRolloverOnWriteTask( String dataStreamName, boolean rolloverOnWrite, + boolean targetFailureStore, TimeValue ackTimeout, ActionListener listener ) { super(ackTimeout, listener); this.dataStreamName = dataStreamName; this.rolloverOnWrite = rolloverOnWrite; + this.targetFailureStore = targetFailureStore; } public String getDataStreamName() { @@ -384,5 +402,9 @@ public String getDataStreamName() { public boolean rolloverOnWrite() { return rolloverOnWrite; } + + public boolean targetFailureStore() { + return targetFailureStore; + } } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 5cabe2238952..fa6ea9c6519d 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -704,7 +704,10 @@ static DataStream updateDataStream(DataStream dataStream, Metadata.Builder metad .stream() .map(i -> metadata.get(renameIndex(i.getName(), request, true)).getIndex()) .toList(); - return dataStream.copy().setName(dataStreamName).setIndices(updatedIndices).build(); + return dataStream.copy() + .setName(dataStreamName) + .setBackingIndices(dataStream.getBackingIndices().copy().setIndices(updatedIndices).build()) + .build(); } public static RestoreInProgress updateRestoreStateWithDeletedIndices(RestoreInProgress oldRestore, Set deletedIndices) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java index 149752578e1e..c2edf9729b8b 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java @@ -682,7 +682,9 @@ public void testRolloverClusterStateForDataStreamFailureStore() throws Exception Metadata.Builder builder = Metadata.builder(); builder.put("template", template); dataStream.getIndices().forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index))); - dataStream.getFailureIndices().forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index))); + dataStream.getFailureIndices() + .getIndices() + .forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index))); builder.put(dataStream); final ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metadata(builder).build(); final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin(); @@ -723,15 +725,18 @@ public void testRolloverClusterStateForDataStreamFailureStore() throws Exception assertEquals(sourceIndexName, rolloverResult.sourceIndexName()); assertEquals(newIndexName, rolloverResult.rolloverIndexName()); Metadata rolloverMetadata = rolloverResult.clusterState().metadata(); - assertEquals(dataStream.getIndices().size() + dataStream.getFailureIndices().size() + 1, rolloverMetadata.indices().size()); + assertEquals( + dataStream.getIndices().size() + dataStream.getFailureIndices().getIndices().size() + 1, + rolloverMetadata.indices().size() + ); IndexMetadata rolloverIndexMetadata = rolloverMetadata.index(newIndexName); var ds = (DataStream) rolloverMetadata.getIndicesLookup().get(dataStream.getName()); assertThat(ds.getType(), equalTo(IndexAbstraction.Type.DATA_STREAM)); assertThat(ds.getIndices(), 
hasSize(dataStream.getIndices().size())); - assertThat(ds.getFailureIndices(), hasSize(dataStream.getFailureIndices().size() + 1)); - assertThat(ds.getFailureIndices(), hasItem(rolloverMetadata.index(sourceIndexName).getIndex())); - assertThat(ds.getFailureIndices(), hasItem(rolloverIndexMetadata.getIndex())); + assertThat(ds.getFailureIndices().getIndices(), hasSize(dataStream.getFailureIndices().getIndices().size() + 1)); + assertThat(ds.getFailureIndices().getIndices(), hasItem(rolloverMetadata.index(sourceIndexName).getIndex())); + assertThat(ds.getFailureIndices().getIndices(), hasItem(rolloverIndexMetadata.getIndex())); assertThat(ds.getFailureStoreWriteIndex(), equalTo(rolloverIndexMetadata.getIndex())); RolloverInfo info = rolloverMetadata.index(sourceIndexName).getRolloverInfos().get(dataStream.getName()); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index 42c4dec3e219..9dbabe2c4189 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -440,12 +440,13 @@ public void testLazyRollover() throws Exception { doAnswer(invocation -> { Object[] args = invocation.getArguments(); - assert args.length == 5; + assert args.length == 6; @SuppressWarnings("unchecked") - ActionListener listener = (ActionListener) args[4]; + ActionListener listener = (ActionListener) args[5]; listener.onResponse(AcknowledgedResponse.TRUE); return null; - }).when(mockMetadataDataStreamService).setRolloverOnWrite(eq(dataStream.getName()), eq(true), any(), any(), anyActionListener()); + }).when(mockMetadataDataStreamService) + .setRolloverOnWrite(eq(dataStream.getName()), eq(true), eq(false), any(), any(), anyActionListener()); final 
TransportRolloverAction transportRolloverAction = new TransportRolloverAction( mock(TransportService.class), diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java index 9803082bbd88..8bc2a978af0c 100644 --- a/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/datastreams/autosharding/DataStreamAutoShardingServiceTests.java @@ -768,10 +768,10 @@ private DataStream createDataStream( builder.put(indexMetadata, false); backingIndices.add(indexMetadata.getIndex()); } - return DataStream.builder(dataStreamName, backingIndices) - .setGeneration(backingIndicesCount) - .setAutoShardingEvent(autoShardingEvent) - .build(); + return DataStream.builder( + dataStreamName, + DataStream.DataStreamIndices.backingIndicesBuilder(backingIndices).setAutoShardingEvent(autoShardingEvent).build() + ).setGeneration(backingIndicesCount).build(); } private IndexMetadata createIndexMetadata( diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index d42b6096b6e3..87fe732d156c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -73,7 +73,7 @@ protected DataStream doParseInstance(XContentParser parser) throws IOException { @Override protected Writeable.Reader instanceReader() { - return DataStream::new; + return DataStream::read; } @Override @@ -94,10 +94,12 @@ protected DataStream mutateInstance(DataStream instance) { var indexMode = instance.getIndexMode(); var lifecycle = instance.getLifecycle(); var failureStore = instance.isFailureStoreEnabled(); - var 
failureIndices = instance.getFailureIndices(); + var failureIndices = instance.getFailureIndices().getIndices(); var rolloverOnWrite = instance.rolloverOnWrite(); var autoShardingEvent = instance.getAutoShardingEvent(); - switch (between(0, 12)) { + var failureRolloverOnWrite = instance.getFailureIndices().isRolloverOnWrite(); + var failureAutoShardingEvent = instance.getBackingIndices().getAutoShardingEvent(); + switch (between(0, 14)) { case 0 -> name = randomAlphaOfLength(10); case 1 -> indices = randomNonEmptyIndexInstances(); case 2 -> generation = instance.getGeneration() + randomIntBetween(1, 10); @@ -114,6 +116,7 @@ protected DataStream mutateInstance(DataStream instance) { isReplicated = isReplicated == false; // Replicated data streams cannot be marked for lazy rollover. rolloverOnWrite = isReplicated == false && rolloverOnWrite; + failureRolloverOnWrite = isReplicated == false && failureRolloverOnWrite; } case 6 -> { if (isSystem == false) { @@ -139,7 +142,27 @@ protected DataStream mutateInstance(DataStream instance) { isReplicated = rolloverOnWrite == false && isReplicated; } case 12 -> { - autoShardingEvent = randomBoolean() && autoShardingEvent != null + if (randomBoolean() || autoShardingEvent == null) { + // If we're mutating the auto sharding event of the failure store, we need to ensure there's at least one failure index. + if (failureIndices.isEmpty()) { + failureIndices = DataStreamTestHelper.randomIndexInstances(); + failureStore = true; + } + autoShardingEvent = new DataStreamAutoShardingEvent( + failureIndices.get(failureIndices.size() - 1).getName(), + randomIntBetween(1, 10), + randomMillisUpToYear9999() + ); + } else { + autoShardingEvent = null; + } + } + case 13 -> { + failureRolloverOnWrite = failureRolloverOnWrite == false; + isReplicated = failureRolloverOnWrite == false && isReplicated; + } + case 14 -> { + failureAutoShardingEvent = randomBoolean() && failureAutoShardingEvent != null ? 
null : new DataStreamAutoShardingEvent( indices.get(indices.size() - 1).getName(), @@ -151,25 +174,29 @@ protected DataStream mutateInstance(DataStream instance) { return new DataStream( name, - indices, generation, metadata, isHidden, isReplicated, isSystem, + System::currentTimeMillis, allowsCustomRouting, indexMode, lifecycle, failureStore, - failureIndices, - rolloverOnWrite, - autoShardingEvent + new DataStream.DataStreamIndices(DataStream.BACKING_INDEX_PREFIX, indices, rolloverOnWrite, autoShardingEvent), + new DataStream.DataStreamIndices( + DataStream.BACKING_INDEX_PREFIX, + failureIndices, + failureRolloverOnWrite, + failureAutoShardingEvent + ) ); } public void testRollover() { DataStream ds = DataStreamTestHelper.randomInstance().promoteDataStream(); - Tuple newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); + Tuple newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), false, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); @@ -196,7 +223,7 @@ public void testRolloverWithConflictingBackingIndexName() { builder.put(im, false); } - final Tuple newCoordinates = ds.nextWriteIndexAndGeneration(builder.build()); + final Tuple newCoordinates = ds.nextWriteIndexAndGeneration(builder.build(), ds.getBackingIndices()); final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), false, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + numConflictingIndices + 1)); @@ -212,7 +239,7 @@ public void testRolloverUpgradeToTsdbDataStream() { .setReplicated(false) .setIndexMode(randomBoolean() ? 
IndexMode.STANDARD : null) .build(); - var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); + var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), true, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); @@ -225,7 +252,7 @@ public void testRolloverUpgradeToTsdbDataStream() { public void testRolloverDowngradeToRegularDataStream() { DataStream ds = DataStreamTestHelper.randomInstance().copy().setReplicated(false).setIndexMode(IndexMode.TIME_SERIES).build(); - var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); + var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), false, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); @@ -238,18 +265,18 @@ public void testRolloverDowngradeToRegularDataStream() { public void testRolloverFailureStore() { DataStream ds = DataStreamTestHelper.randomInstance(true).promoteDataStream(); - Tuple newCoordinates = ds.nextFailureStoreWriteIndexAndGeneration(Metadata.EMPTY_METADATA); + Tuple newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getFailureIndices()); final DataStream rolledDs = ds.rolloverFailureStore(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2()); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size())); // Ensure that the rolloverOnWrite flag hasn't changed when rolling over a failure store. 
assertThat(rolledDs.rolloverOnWrite(), equalTo(ds.rolloverOnWrite())); - assertThat(rolledDs.getFailureIndices().size(), equalTo(ds.getFailureIndices().size() + 1)); + assertThat(rolledDs.getFailureIndices().getIndices().size(), equalTo(ds.getFailureIndices().getIndices().size() + 1)); assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); - assertTrue(rolledDs.getFailureIndices().containsAll(ds.getFailureIndices())); - assertTrue(rolledDs.getFailureIndices().contains(rolledDs.getFailureStoreWriteIndex())); + assertTrue(rolledDs.getFailureIndices().getIndices().containsAll(ds.getFailureIndices().getIndices())); + assertTrue(rolledDs.getFailureIndices().getIndices().contains(rolledDs.getFailureStoreWriteIndex())); } public void testRemoveBackingIndex() { @@ -298,15 +325,18 @@ public void testRemoveBackingWriteIndex() { public void testRemoveFailureStoreIndex() { DataStream original = createRandomDataStream(); - int indexToRemove = randomIntBetween(1, original.getFailureIndices().size() - 1); + int indexToRemove = randomIntBetween(1, original.getFailureIndices().getIndices().size() - 1); - DataStream updated = original.removeFailureStoreIndex(original.getFailureIndices().get(indexToRemove - 1)); + DataStream updated = original.removeFailureStoreIndex(original.getFailureIndices().getIndices().get(indexToRemove - 1)); assertThat(updated.getName(), equalTo(original.getName())); assertThat(updated.getGeneration(), equalTo(original.getGeneration() + 1)); assertThat(updated.getIndices().size(), equalTo(original.getIndices().size())); - assertThat(updated.getFailureIndices().size(), equalTo(original.getFailureIndices().size() - 1)); - for (int k = 0; k < (original.getFailureIndices().size() - 1); k++) { - assertThat(updated.getFailureIndices().get(k), equalTo(original.getFailureIndices().get(k < (indexToRemove - 1) ? 
k : k + 1))); + assertThat(updated.getFailureIndices().getIndices().size(), equalTo(original.getFailureIndices().getIndices().size() - 1)); + for (int k = 0; k < (original.getFailureIndices().getIndices().size() - 1); k++) { + assertThat( + updated.getFailureIndices().getIndices().get(k), + equalTo(original.getFailureIndices().getIndices().get(k < (indexToRemove - 1) ? k : k + 1)) + ); } } @@ -326,7 +356,9 @@ public void testRemoveFailureStoreWriteIndex() { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> original.removeFailureStoreIndex(original.getFailureIndices().get(original.getFailureIndices().size() - 1)) + () -> original.removeFailureStoreIndex( + original.getFailureIndices().getIndices().get(original.getFailureIndices().getIndices().size() - 1) + ) ); assertThat( e.getMessage(), @@ -334,7 +366,7 @@ public void testRemoveFailureStoreWriteIndex() { String.format( Locale.ROOT, "cannot remove backing index [%s] of data stream [%s] because it is the write index", - original.getFailureIndices().get(original.getFailureIndices().size() - 1).getName(), + original.getFailureIndices().getIndices().get(original.getFailureIndices().getIndices().size() - 1).getName(), original.getName() ) ) @@ -379,9 +411,9 @@ public void testAddBackingIndexThatIsPartOfAnotherDataStream() { builder.put(ds2); createMetadataForIndices(builder, ds1.getIndices()); - createMetadataForIndices(builder, ds1.getFailureIndices()); + createMetadataForIndices(builder, ds1.getFailureIndices().getIndices()); createMetadataForIndices(builder, ds2.getIndices()); - createMetadataForIndices(builder, ds2.getFailureIndices()); + createMetadataForIndices(builder, ds2.getFailureIndices().getIndices()); Index indexToAdd = randomFrom(ds2.getIndices().toArray(Index.EMPTY_ARRAY)); @@ -409,11 +441,11 @@ public void testAddBackingIndexThatIsPartOfDataStreamFailureStore() { builder.put(ds2); createMetadataForIndices(builder, ds1.getIndices()); - createMetadataForIndices(builder, 
ds1.getFailureIndices()); + createMetadataForIndices(builder, ds1.getFailureIndices().getIndices()); createMetadataForIndices(builder, ds2.getIndices()); - createMetadataForIndices(builder, ds2.getFailureIndices()); + createMetadataForIndices(builder, ds2.getFailureIndices().getIndices()); - Index indexToAdd = randomFrom(ds2.getFailureIndices().toArray(Index.EMPTY_ARRAY)); + Index indexToAdd = randomFrom(ds2.getFailureIndices().getIndices().toArray(Index.EMPTY_ARRAY)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> ds1.addBackingIndex(builder.build(), indexToAdd)); assertThat( @@ -498,7 +530,7 @@ public void testAddFailureStoreIndex() { builder.put(original); createMetadataForIndices(builder, original.getIndices()); - createMetadataForIndices(builder, original.getFailureIndices()); + createMetadataForIndices(builder, original.getFailureIndices().getIndices()); Index indexToAdd = new Index(randomAlphaOfLength(4), UUIDs.randomBase64UUID(random())); builder.put( @@ -514,11 +546,11 @@ public void testAddFailureStoreIndex() { assertThat(updated.getName(), equalTo(original.getName())); assertThat(updated.getGeneration(), equalTo(original.getGeneration() + 1)); assertThat(updated.getIndices().size(), equalTo(original.getIndices().size())); - assertThat(updated.getFailureIndices().size(), equalTo(original.getFailureIndices().size() + 1)); - for (int k = 1; k <= original.getFailureIndices().size(); k++) { - assertThat(updated.getFailureIndices().get(k), equalTo(original.getFailureIndices().get(k - 1))); + assertThat(updated.getFailureIndices().getIndices().size(), equalTo(original.getFailureIndices().getIndices().size() + 1)); + for (int k = 1; k <= original.getFailureIndices().getIndices().size(); k++) { + assertThat(updated.getFailureIndices().getIndices().get(k), equalTo(original.getFailureIndices().getIndices().get(k - 1))); } - assertThat(updated.getFailureIndices().get(0), equalTo(indexToAdd)); + 
assertThat(updated.getFailureIndices().getIndices().get(0), equalTo(indexToAdd)); } public void testAddFailureStoreIndexThatIsPartOfAnotherDataStream() { @@ -530,11 +562,11 @@ public void testAddFailureStoreIndexThatIsPartOfAnotherDataStream() { builder.put(ds2); createMetadataForIndices(builder, ds1.getIndices()); - createMetadataForIndices(builder, ds1.getFailureIndices()); + createMetadataForIndices(builder, ds1.getFailureIndices().getIndices()); createMetadataForIndices(builder, ds2.getIndices()); - createMetadataForIndices(builder, ds2.getFailureIndices()); + createMetadataForIndices(builder, ds2.getFailureIndices().getIndices()); - Index indexToAdd = randomFrom(ds2.getFailureIndices().toArray(Index.EMPTY_ARRAY)); + Index indexToAdd = randomFrom(ds2.getFailureIndices().getIndices().toArray(Index.EMPTY_ARRAY)); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -563,9 +595,9 @@ public void testAddFailureStoreIndexThatIsPartOfDataStreamBackingIndices() { builder.put(ds2); createMetadataForIndices(builder, ds1.getIndices()); - createMetadataForIndices(builder, ds1.getFailureIndices()); + createMetadataForIndices(builder, ds1.getFailureIndices().getIndices()); createMetadataForIndices(builder, ds2.getIndices()); - createMetadataForIndices(builder, ds2.getFailureIndices()); + createMetadataForIndices(builder, ds2.getFailureIndices().getIndices()); Index indexToAdd = randomFrom(ds2.getIndices().toArray(Index.EMPTY_ARRAY)); @@ -594,16 +626,16 @@ public void testAddExistingFailureStoreIndex() { builder.put(original); createMetadataForIndices(builder, original.getIndices()); - createMetadataForIndices(builder, original.getFailureIndices()); + createMetadataForIndices(builder, original.getFailureIndices().getIndices()); - Index indexToAdd = randomFrom(original.getFailureIndices().toArray(Index.EMPTY_ARRAY)); + Index indexToAdd = randomFrom(original.getFailureIndices().getIndices().toArray(Index.EMPTY_ARRAY)); DataStream updated = 
original.addFailureStoreIndex(builder.build(), indexToAdd); assertThat(updated.getName(), equalTo(original.getName())); assertThat(updated.getGeneration(), equalTo(original.getGeneration())); assertThat(updated.getIndices().size(), equalTo(original.getIndices().size())); - assertThat(updated.getFailureIndices().size(), equalTo(original.getFailureIndices().size())); - assertThat(updated.getFailureIndices(), equalTo(original.getFailureIndices())); + assertThat(updated.getFailureIndices().getIndices().size(), equalTo(original.getFailureIndices().getIndices().size())); + assertThat(updated.getFailureIndices().getIndices(), equalTo(original.getFailureIndices().getIndices())); } public void testAddFailureStoreIndexWithAliases() { @@ -613,7 +645,7 @@ public void testAddFailureStoreIndexWithAliases() { builder.put(original); createMetadataForIndices(builder, original.getIndices()); - createMetadataForIndices(builder, original.getFailureIndices()); + createMetadataForIndices(builder, original.getFailureIndices().getIndices()); Index indexToAdd = new Index(randomAlphaOfLength(4), UUIDs.randomBase64UUID(random())); IndexMetadata.Builder b = IndexMetadata.builder(indexToAdd.getName()) @@ -743,11 +775,16 @@ public void testSnapshot() { var replicated = preSnapshotDataStream.isReplicated() && randomBoolean(); var postSnapshotDataStream = preSnapshotDataStream.copy() - .setIndices(postSnapshotIndices) + .setBackingIndices( + preSnapshotDataStream.getBackingIndices() + .copy() + .setIndices(postSnapshotIndices) + .setRolloverOnWrite(replicated == false && preSnapshotDataStream.rolloverOnWrite()) + .build() + ) .setGeneration(preSnapshotDataStream.getGeneration() + randomIntBetween(0, 5)) .setMetadata(preSnapshotDataStream.getMetadata() == null ? 
null : new HashMap<>(preSnapshotDataStream.getMetadata())) .setReplicated(replicated) - .setRolloverOnWrite(replicated == false && preSnapshotDataStream.rolloverOnWrite()) .build(); var reconciledDataStream = postSnapshotDataStream.snapshot( @@ -775,7 +812,9 @@ public void testSnapshotWithAllBackingIndicesRemoved() { var preSnapshotDataStream = DataStreamTestHelper.randomInstance(); var indicesToAdd = randomNonEmptyIndexInstances(); - var postSnapshotDataStream = preSnapshotDataStream.copy().setIndices(indicesToAdd).build(); + var postSnapshotDataStream = preSnapshotDataStream.copy() + .setBackingIndices(preSnapshotDataStream.getBackingIndices().copy().setIndices(indicesToAdd).build()) + .build(); assertNull(postSnapshotDataStream.snapshot(preSnapshotDataStream.getIndices().stream().map(Index::getName).toList())); } @@ -1769,7 +1808,6 @@ public void testXContentSerializationWithRolloverAndEffectiveRetention() throws isSystem, randomBoolean(), isSystem, - System::currentTimeMillis, randomBoolean(), randomBoolean() ? IndexMode.STANDARD : null, // IndexMode.TIME_SERIES triggers validation that many unit tests doesn't pass lifecycle, @@ -1958,12 +1996,11 @@ public void testWriteFailureIndex() { hidden, replicated, system, - System::currentTimeMillis, randomBoolean(), randomBoolean() ? IndexMode.STANDARD : IndexMode.TIME_SERIES, DataStreamLifecycleTests.randomLifecycle(), false, - null, + List.of(), replicated == false && randomBoolean(), null ); @@ -1977,7 +2014,6 @@ public void testWriteFailureIndex() { hidden, replicated, system, - System::currentTimeMillis, randomBoolean(), randomBoolean() ? IndexMode.STANDARD : IndexMode.TIME_SERIES, DataStreamLifecycleTests.randomLifecycle(), @@ -2003,7 +2039,6 @@ public void testWriteFailureIndex() { hidden, replicated, system, - System::currentTimeMillis, randomBoolean(), randomBoolean() ? 
IndexMode.STANDARD : IndexMode.TIME_SERIES, DataStreamLifecycleTests.randomLifecycle(), @@ -2028,12 +2063,11 @@ public void testIsFailureIndex() { hidden, replicated, system, - System::currentTimeMillis, randomBoolean(), randomBoolean() ? IndexMode.STANDARD : IndexMode.TIME_SERIES, DataStreamLifecycleTests.randomLifecycle(), false, - null, + List.of(), replicated == false && randomBoolean(), null ); @@ -2051,7 +2085,6 @@ public void testIsFailureIndex() { hidden, replicated, system, - System::currentTimeMillis, randomBoolean(), randomBoolean() ? IndexMode.STANDARD : IndexMode.TIME_SERIES, DataStreamLifecycleTests.randomLifecycle(), @@ -2083,7 +2116,6 @@ public void testIsFailureIndex() { hidden, replicated, system, - System::currentTimeMillis, randomBoolean(), randomBoolean() ? IndexMode.STANDARD : IndexMode.TIME_SERIES, DataStreamLifecycleTests.randomLifecycle(), diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java index 9a560abe20c7..d4639c3d3118 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java @@ -357,7 +357,12 @@ public void testRemoveBrokenBackingIndexReference() { var state = DataStreamTestHelper.getClusterStateWithDataStreams(List.of(new Tuple<>(dataStreamName, 2)), List.of()); var original = state.getMetadata().dataStreams().get(dataStreamName); var broken = original.copy() - .setIndices(List.of(new Index(original.getIndices().get(0).getName(), "broken"), original.getIndices().get(1))) + .setBackingIndices( + original.getBackingIndices() + .copy() + .setIndices(List.of(new Index(original.getIndices().get(0).getName(), "broken"), original.getIndices().get(1))) + .build() + ) .build(); var brokenState = 
ClusterState.builder(state).metadata(Metadata.builder(state.getMetadata()).put(broken).build()).build(); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index bbbafef514e3..c78ed54c13d8 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -131,13 +131,10 @@ public static DataStream newInstance( @Nullable DataStreamLifecycle lifecycle, @Nullable DataStreamAutoShardingEvent autoShardingEvent ) { - return DataStream.builder(name, indices) - .setGeneration(generation) - .setMetadata(metadata) - .setReplicated(replicated) - .setLifecycle(lifecycle) - .setAutoShardingEvent(autoShardingEvent) - .build(); + return DataStream.builder( + name, + DataStream.DataStreamIndices.backingIndicesBuilder(indices).setAutoShardingEvent(autoShardingEvent).build() + ).setGeneration(generation).setMetadata(metadata).setReplicated(replicated).setLifecycle(lifecycle).build(); } public static DataStream newInstance( @@ -155,7 +152,7 @@ public static DataStream newInstance( .setReplicated(replicated) .setLifecycle(lifecycle) .setFailureStoreEnabled(failureStores.isEmpty() == false) - .setFailureIndices(failureStores) + .setFailureIndices(DataStream.DataStreamIndices.failureIndicesBuilder(failureStores).build()) .build(); } @@ -341,7 +338,6 @@ public static DataStream randomInstance(String dataStreamName, LongSupplier time boolean replicated = randomBoolean(); return new DataStream( dataStreamName, - indices, generation, metadata, randomBoolean(), @@ -352,15 +348,30 @@ public static DataStream randomInstance(String dataStreamName, LongSupplier time randomBoolean() ? IndexMode.STANDARD : null, // IndexMode.TIME_SERIES triggers validation that many unit tests doesn't pass randomBoolean() ? 
DataStreamLifecycle.newBuilder().dataRetention(randomMillisUpToYear9999()).build() : null, failureStore, - failureIndices, - replicated == false && randomBoolean(), - randomBoolean() - ? new DataStreamAutoShardingEvent( - indices.get(indices.size() - 1).getName(), - randomIntBetween(1, 10), - randomMillisUpToYear9999() + DataStream.DataStreamIndices.backingIndicesBuilder(indices) + .setRolloverOnWrite(replicated == false && randomBoolean()) + .setAutoShardingEvent( + randomBoolean() + ? new DataStreamAutoShardingEvent( + indices.get(indices.size() - 1).getName(), + randomIntBetween(1, 10), + randomMillisUpToYear9999() + ) + : null ) - : null + .build(), + DataStream.DataStreamIndices.failureIndicesBuilder(failureIndices) + .setRolloverOnWrite(failureStore && replicated == false && randomBoolean()) + .setAutoShardingEvent( + failureStore && randomBoolean() + ? new DataStreamAutoShardingEvent( + indices.get(indices.size() - 1).getName(), + randomIntBetween(1, 10), + randomMillisUpToYear9999() + ) + : null + ) + .build() ); } diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java index 28983fe34df9..2f8cccdc303e 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java @@ -816,7 +816,10 @@ private SingleForecast forecast(Metadata metadata, DataStream stream, long forec Map newIndices = new HashMap<>(); for (int i = 0; i < numberNewIndices; ++i) { final String uuid = UUIDs.randomBase64UUID(); - final Tuple rolledDataStreamInfo = stream.unsafeNextWriteIndexAndGeneration(state.metadata()); + final Tuple rolledDataStreamInfo = stream.unsafeNextWriteIndexAndGeneration( + 
state.metadata(), + stream.getBackingIndices() + ); stream = stream.unsafeRollover( new Index(rolledDataStreamInfo.v1(), uuid), rolledDataStreamInfo.v2(), diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index 0a0cade089fa..a0917c1cef81 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -330,11 +330,12 @@ static DataStream updateLocalDataStream( // just copying the data stream is in this case safe. return remoteDataStream.copy() .setName(localDataStreamName) - .setIndices(List.of(backingIndexToFollow)) + .setBackingIndices( + // Replicated data streams can't be rolled over, so having the `rolloverOnWrite` flag set to `true` wouldn't make sense + // (and potentially even break things). + remoteDataStream.getBackingIndices().copy().setIndices(List.of(backingIndexToFollow)).setRolloverOnWrite(false).build() + ) .setReplicated(true) - // Replicated data streams can't be rolled over, so having the `rolloverOnWrite` flag set to `true` wouldn't make sense - // (and potentially even break things). 
- .setRolloverOnWrite(false) .build(); } else { if (localDataStream.isReplicated() == false) { @@ -376,7 +377,7 @@ static DataStream updateLocalDataStream( } return localDataStream.copy() - .setIndices(backingIndices) + .setBackingIndices(localDataStream.getBackingIndices().copy().setIndices(backingIndices).build()) .setGeneration(remoteDataStream.getGeneration()) .setMetadata(remoteDataStream.getMetadata()) .build(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java index 96284b2826e4..e37823f8d3c4 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java @@ -90,7 +90,12 @@ public ClusterState execute(ClusterState currentState) throws Exception { ? original.getIndices().get(0).getName() + "-broken" : original.getIndices().get(0).getName(); DataStream broken = original.copy() - .setIndices(List.of(new Index(brokenIndexName, "broken"), original.getIndices().get(1))) + .setBackingIndices( + original.getBackingIndices() + .copy() + .setIndices(List.of(new Index(brokenIndexName, "broken"), original.getIndices().get(1))) + .build() + ) .build(); brokenDataStreamHolder.set(broken); return ClusterState.builder(currentState) From d3a285e1c7b31eae3645f2c5b7054a0ac5abe259 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Tue, 14 May 2024 11:40:27 +0200 Subject: [PATCH 117/119] Fix testDanglingIndicesCanBeListed (#108599) The test started failing because of the recent changes to allow closing (and deleting shards) asynchronously. As a result dandling index API now is seeing a directory in partially deleted state, fails to interpret partial data and fails as a result. The fix retries the failure on the client. 
--- .../http/DanglingIndicesRestIT.java | 29 +++++-------------- .../elasticsearch/test/XContentTestUtils.java | 5 ++++ 2 files changed, 13 insertions(+), 21 deletions(-) diff --git a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/DanglingIndicesRestIT.java b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/DanglingIndicesRestIT.java index eaf439f264ad..d04c8802635d 100644 --- a/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/DanglingIndicesRestIT.java +++ b/qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/DanglingIndicesRestIT.java @@ -72,7 +72,7 @@ public void testDanglingIndicesCanBeListed() throws Exception { internalCluster().startNodes(3, buildSettings(0)); final DanglingIndexDetails danglingIndexDetails = createDanglingIndices(INDEX_NAME); - final String stoppedNodeId = mapNodeNameToId(danglingIndexDetails.stoppedNodeName); + final String stoppedNodeId = getNodeId(danglingIndexDetails.stoppedNodeName); final RestClient restClient = getRestClient(); @@ -163,7 +163,12 @@ public void testDanglingIndicesCanBeDeleted() throws Exception { // tombstone has been pushed out of the graveyard. createIndex("additional"); deleteIndex("additional"); - assertThat(listDanglingIndexIds(), is(empty())); + // reading dangling index metadata happens without the all shard locks + // (as we do not know the index name from the index directory structure). + // As a result the index directory could be updated or deleted in the meanwhile by any concurrent operation + // and result in the node request failure that is going to be propagated to the API call. + // Since dandling index API is a best effort we expect such failures to be retried on the client level. 
+ assertBusy(() -> assertThat(listDanglingIndexIds(), is(empty()))); } private List listDanglingIndexIds() throws IOException { @@ -171,15 +176,14 @@ private List listDanglingIndexIds() throws IOException { assertOK(response); final XContentTestUtils.JsonMapView mapView = createJsonMapView(response.getEntity().getContent()); + logger.warn("dangling API response: {}", mapView); assertThat(mapView.get("_nodes.total"), equalTo(3)); assertThat(mapView.get("_nodes.successful"), equalTo(3)); assertThat(mapView.get("_nodes.failed"), equalTo(0)); List indices = mapView.get("dangling_indices"); - List danglingIndexIds = new ArrayList<>(); - for (int i = 0; i < indices.size(); i++) { danglingIndexIds.add(mapView.get("dangling_indices." + i + ".index_uuid")); } @@ -187,23 +191,6 @@ private List listDanglingIndexIds() throws IOException { return danglingIndexIds; } - /** - * Given a node name, finds the corresponding node ID. - */ - private String mapNodeNameToId(String nodeName) throws IOException { - final Response catResponse = getRestClient().performRequest(new Request("GET", "/_cat/nodes?full_id&h=id,name")); - assertOK(catResponse); - - for (String nodeLine : Streams.readAllLines(catResponse.getEntity().getContent())) { - String[] elements = nodeLine.split(" "); - if (elements[1].equals(nodeName)) { - return elements[0]; - } - } - - throw new AssertionError("Failed to map node name [" + nodeName + "] to node ID"); - } - /** * Helper that creates one or more indices, and importantly, * checks that they are green before proceeding. 
This is important diff --git a/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java b/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java index 40cdacb767d0..e05c2dde930a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java @@ -354,5 +354,10 @@ public T get(String path) { } return (T) context; } + + @Override + public String toString() { + return "JsonMapView{map=" + map + '}'; + } } } From 3fe478551f8b3616836e08c6745d16753381dd08 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 14 May 2024 10:42:56 +0100 Subject: [PATCH 118/119] Remove unused `?master_node_timeout` compat shims (#108601) These overloads are no longer used so this commit removes them. Relates #107984 --- .../action/support/master/AcknowledgedRequest.java | 5 ----- .../xpack/core/ml/action/GetMlAutoscalingStats.java | 5 ----- 2 files changed, 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java index 2bbe3d36f031..b6389d0b112b 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequest.java @@ -52,11 +52,6 @@ protected AcknowledgedRequest(TimeValue masterNodeTimeout, TimeValue ackTimeout) this.ackTimeout = Objects.requireNonNull(ackTimeout); } - @Deprecated(forRemoval = true) // just a temporary compatibility shim - protected AcknowledgedRequest(TimeValue ackTimeout) { - this(MasterNodeRequest.TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, ackTimeout); - } - protected AcknowledgedRequest(StreamInput in) throws IOException { super(in); this.ackTimeout = in.readTimeValue(); diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java index c23d75f02937..95fa59541171 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetMlAutoscalingStats.java @@ -45,11 +45,6 @@ public Request(TimeValue masterNodeTimeout, TimeValue requestTimeout) { this.requestTimeout = Objects.requireNonNull(requestTimeout); } - @Deprecated(forRemoval = true) // temporary compatibility shi - public Request(TimeValue timeout) { - this(TRAPPY_IMPLICIT_DEFAULT_MASTER_NODE_TIMEOUT, timeout); - } - public Request(StreamInput in) throws IOException { super(in); this.requestTimeout = in.readTimeValue(); From e01600d5a53b8e41e7a084b617fea8ae82560d06 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Tue, 14 May 2024 11:02:42 +0100 Subject: [PATCH 119/119] Create a custom parser for parsing ISO8601 datetime variants (#106486) This adds a hand-written parser for parsing fixed ISO8601 datetime strings, for the `iso8601`, `strict_date_optional_time`, and `strict_date_optional_time_nanos` date formats. If the new parser fails to parse a string, the existing parsers are then tried, so existing behaviour is maintained. There is a new JVM option added that can force use of the existing parsers, if that is needed for any reason. 
--- docs/changelog/106486.yaml | 17 + .../common/time/CharSubSequence.java | 68 +++ .../common/time/DateFormatters.java | 135 +++-- .../elasticsearch/common/time/DateTime.java | 150 +++++ .../common/time/Iso8601DateTimeParser.java | 78 +++ .../common/time/Iso8601Parser.java | 521 ++++++++++++++++++ .../common/time/JavaDateFormatter.java | 19 + .../common/time/Iso8601ParserTests.java | 427 ++++++++++++++ 8 files changed, 1371 insertions(+), 44 deletions(-) create mode 100644 docs/changelog/106486.yaml create mode 100644 server/src/main/java/org/elasticsearch/common/time/CharSubSequence.java create mode 100644 server/src/main/java/org/elasticsearch/common/time/DateTime.java create mode 100644 server/src/main/java/org/elasticsearch/common/time/Iso8601DateTimeParser.java create mode 100644 server/src/main/java/org/elasticsearch/common/time/Iso8601Parser.java create mode 100644 server/src/test/java/org/elasticsearch/common/time/Iso8601ParserTests.java diff --git a/docs/changelog/106486.yaml b/docs/changelog/106486.yaml new file mode 100644 index 000000000000..b33df50780e0 --- /dev/null +++ b/docs/changelog/106486.yaml @@ -0,0 +1,17 @@ +pr: 106486 +summary: Create custom parser for ISO-8601 datetimes +area: Infra/Core +type: enhancement +issues: + - 102063 +highlight: + title: New custom parser for ISO-8601 datetimes + body: |- + This introduces a new custom parser for ISO-8601 datetimes, for the `iso8601`, `strict_date_optional_time`, and + `strict_date_optional_time_nanos` built-in date formats. This provides a performance improvement over the + default Java date-time parsing. Whilst it maintains much of the same behaviour, + the new parser does not accept nonsensical date-time strings that have multiple fractional seconds fields + or multiple timezone specifiers. If the new parser fails to parse a string, it will then use the previous parser + to parse it. 
If a large proportion of the input data consists of these invalid strings, this may cause + a small performance degradation. If you wish to force the use of the old parsers regardless, + set the JVM property `es.datetime.java_time_parsers=true` on all ES nodes. diff --git a/server/src/main/java/org/elasticsearch/common/time/CharSubSequence.java b/server/src/main/java/org/elasticsearch/common/time/CharSubSequence.java new file mode 100644 index 000000000000..39dbb83bdf5a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/CharSubSequence.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.time; + +import java.util.stream.IntStream; + +/** + * A CharSequence that provides a subsequence of another CharSequence without allocating a new backing array (as String does) + */ +class CharSubSequence implements CharSequence { + private final CharSequence wrapped; + private final int startOffset; // inclusive + private final int endOffset; // exclusive + + CharSubSequence(CharSequence wrapped, int startOffset, int endOffset) { + if (startOffset < 0) throw new IllegalArgumentException(); + if (endOffset > wrapped.length()) throw new IllegalArgumentException(); + if (endOffset < startOffset) throw new IllegalArgumentException(); + + this.wrapped = wrapped; + this.startOffset = startOffset; + this.endOffset = endOffset; + } + + @Override + public int length() { + return endOffset - startOffset; + } + + @Override + public char charAt(int index) { + int adjustedIndex = index + startOffset; + if (adjustedIndex < startOffset || adjustedIndex >= endOffset) throw new IndexOutOfBoundsException(index); + 
return wrapped.charAt(adjustedIndex); + } + + @Override + public boolean isEmpty() { + return startOffset == endOffset; + } + + @Override + public CharSequence subSequence(int start, int end) { + int adjustedStart = start + startOffset; + int adjustedEnd = end + startOffset; + if (adjustedStart < startOffset) throw new IndexOutOfBoundsException(start); + if (adjustedEnd > endOffset) throw new IndexOutOfBoundsException(end); + if (adjustedStart > adjustedEnd) throw new IndexOutOfBoundsException(); + + return wrapped.subSequence(adjustedStart, adjustedEnd); + } + + @Override + public IntStream chars() { + return wrapped.chars().skip(startOffset).limit(endOffset - startOffset); + } + + @Override + public String toString() { + return wrapped.subSequence(startOffset, endOffset).toString(); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java index 7dae11fb8d72..1133eac3f8f7 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -9,7 +9,10 @@ package org.elasticsearch.common.time; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.Booleans; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.UpdateForV9; +import org.elasticsearch.logging.internal.spi.LoggerFactory; import java.time.Instant; import java.time.LocalDate; @@ -30,6 +33,7 @@ import java.time.temporal.TemporalQuery; import java.time.temporal.WeekFields; import java.util.Locale; +import java.util.Set; import java.util.stream.Stream; import static java.time.temporal.ChronoField.DAY_OF_MONTH; @@ -43,6 +47,24 @@ public class DateFormatters { + /** + * The ISO8601 parser is as close as possible to the java.time based parsers, but there are some strings + * that are no longer accepted (multiple fractional seconds, or multiple timezones) by the ISO 
parser. + * If a string cannot be parsed by the ISO parser, it then tries the java.time one. + * If there's lots of these strings, trying the ISO parser, then the java.time parser, might cause a performance drop. + * So provide a JVM option so that users can just use the java.time parsers, if they really need to. + */ + @UpdateForV9 // evaluate if we need to deprecate/remove this + private static final boolean JAVA_TIME_PARSERS_ONLY = Booleans.parseBoolean(System.getProperty("es.datetime.java_time_parsers"), false); + + static { + // when this is used directly in tests ES logging may not have been initialized yet + LoggerFactory logger; + if (JAVA_TIME_PARSERS_ONLY && (logger = LoggerFactory.provider()) != null) { + logger.getLogger(DateFormatters.class).info("Using java.time datetime parsers only"); + } + } + private static DateFormatter newDateFormatter(String format, DateTimeFormatter formatter) { return new JavaDateFormatter(format, new JavaTimeDateTimePrinter(formatter), new JavaTimeDateTimeParser(formatter)); } @@ -168,11 +190,18 @@ private static DateFormatter newDateFormatter(String format, DateTimeFormatter p /** * Returns a generic ISO datetime parser where the date is mandatory and the time is optional. */ - private static final DateFormatter STRICT_DATE_OPTIONAL_TIME = newDateFormatter( - "strict_date_optional_time", - STRICT_DATE_OPTIONAL_TIME_PRINTER, - STRICT_DATE_OPTIONAL_TIME_FORMATTER - ); + private static final DateFormatter STRICT_DATE_OPTIONAL_TIME; + static { + DateTimeParser javaTimeParser = new JavaTimeDateTimeParser(STRICT_DATE_OPTIONAL_TIME_FORMATTER); + + STRICT_DATE_OPTIONAL_TIME = new JavaDateFormatter( + "strict_date_optional_time", + new JavaTimeDateTimePrinter(STRICT_DATE_OPTIONAL_TIME_PRINTER), + JAVA_TIME_PARSERS_ONLY + ? 
new DateTimeParser[] { javaTimeParser } + : new DateTimeParser[] { new Iso8601DateTimeParser(Set.of(), false).withLocale(Locale.ROOT), javaTimeParser } + ); + } private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS = new DateTimeFormatterBuilder().append( STRICT_YEAR_MONTH_DAY_FORMATTER @@ -224,51 +253,69 @@ private static DateFormatter newDateFormatter(String format, DateTimeFormatter p /** * Returns a generic ISO datetime parser where the date is mandatory and the time is optional with nanosecond resolution. */ - private static final DateFormatter STRICT_DATE_OPTIONAL_TIME_NANOS = newDateFormatter( - "strict_date_optional_time_nanos", - STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS, - STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS - ); + private static final DateFormatter STRICT_DATE_OPTIONAL_TIME_NANOS; + static { + DateTimeParser javaTimeParser = new JavaTimeDateTimeParser(STRICT_DATE_OPTIONAL_TIME_FORMATTER_WITH_NANOS); + + STRICT_DATE_OPTIONAL_TIME_NANOS = new JavaDateFormatter( + "strict_date_optional_time_nanos", + new JavaTimeDateTimePrinter(STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS), + JAVA_TIME_PARSERS_ONLY + ? new DateTimeParser[] { javaTimeParser } + : new DateTimeParser[] { + new Iso8601DateTimeParser(Set.of(HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE), true).withLocale(Locale.ROOT), + javaTimeParser } + ); + } /** * Returns a ISO 8601 compatible date time formatter and parser. 
* This is not fully compatible to the existing spec, which would require far more edge cases, but merely compatible with the * existing legacy joda time ISO date formatter */ - private static final DateFormatter ISO_8601 = newDateFormatter( - "iso8601", - STRICT_DATE_OPTIONAL_TIME_PRINTER, - new DateTimeFormatterBuilder().append(STRICT_YEAR_MONTH_DAY_FORMATTER) - .optionalStart() - .appendLiteral('T') - .optionalStart() - .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) - .optionalStart() - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) - .optionalStart() - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) - .optionalStart() - .appendFraction(NANO_OF_SECOND, 1, 9, true) - .optionalEnd() - .optionalStart() - .appendLiteral(",") - .appendFraction(NANO_OF_SECOND, 1, 9, false) - .optionalEnd() - .optionalEnd() - .optionalEnd() - .optionalEnd() - .optionalStart() - .appendZoneOrOffsetId() - .optionalEnd() - .optionalStart() - .append(TIME_ZONE_FORMATTER_NO_COLON) - .optionalEnd() - .optionalEnd() - .toFormatter(Locale.ROOT) - .withResolverStyle(ResolverStyle.STRICT) - ); + private static final DateFormatter ISO_8601; + static { + DateTimeParser javaTimeParser = new JavaTimeDateTimeParser( + new DateTimeFormatterBuilder().append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .optionalStart() + .appendLiteral('T') + .optionalStart() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendFraction(NANO_OF_SECOND, 1, 9, true) + .optionalEnd() + .optionalStart() + .appendLiteral(",") + .appendFraction(NANO_OF_SECOND, 1, 9, false) + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalStart() + .appendZoneOrOffsetId() + .optionalEnd() + .optionalStart() + 
.append(TIME_ZONE_FORMATTER_NO_COLON) + .optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT) + .withResolverStyle(ResolverStyle.STRICT) + ); + + ISO_8601 = new JavaDateFormatter( + "iso8601", + new JavaTimeDateTimePrinter(STRICT_DATE_OPTIONAL_TIME_PRINTER), + JAVA_TIME_PARSERS_ONLY + ? new DateTimeParser[] { javaTimeParser } + : new DateTimeParser[] { new Iso8601DateTimeParser(Set.of(), false).withLocale(Locale.ROOT), javaTimeParser } + ); + } ///////////////////////////////////////// // diff --git a/server/src/main/java/org/elasticsearch/common/time/DateTime.java b/server/src/main/java/org/elasticsearch/common/time/DateTime.java new file mode 100644 index 000000000000..101389b43d9f --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/DateTime.java @@ -0,0 +1,150 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.time; + +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoField; +import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalField; +import java.time.temporal.TemporalQueries; +import java.time.temporal.TemporalQuery; +import java.time.temporal.UnsupportedTemporalTypeException; + +/** + * Provides information on a parsed datetime + */ +record DateTime( + int years, + Integer months, + Integer days, + Integer hours, + Integer minutes, + Integer seconds, + Integer nanos, + ZoneId zoneId, + ZoneOffset offset +) implements TemporalAccessor { + + @Override + @SuppressWarnings("unchecked") + public R query(TemporalQuery query) { + // shortcut a few queries used by DateFormatters.from + if (query == TemporalQueries.zoneId()) { + return (R) zoneId; + } + if (query == TemporalQueries.offset()) { + return (R) offset; + } + if (query == DateFormatters.LOCAL_DATE_QUERY || query == TemporalQueries.localDate()) { + if (months != null && days != null) { + return (R) LocalDate.of(years, months, days); + } + return null; + } + if (query == TemporalQueries.localTime()) { + if (hours != null && minutes != null && seconds != null) { + return (R) LocalTime.of(hours, minutes, seconds, nanos != null ? 
nanos : 0); + } + return null; + } + return TemporalAccessor.super.query(query); + } + + @Override + public boolean isSupported(TemporalField field) { + if (field instanceof ChronoField f) { + return switch (f) { + case YEAR -> true; + case MONTH_OF_YEAR -> months != null; + case DAY_OF_MONTH -> days != null; + case HOUR_OF_DAY -> hours != null; + case MINUTE_OF_HOUR -> minutes != null; + case SECOND_OF_MINUTE -> seconds != null; + case INSTANT_SECONDS -> months != null && days != null && hours != null && minutes != null && seconds != null; + // if the time components are there, we just default nanos to 0 if it's not present + case SECOND_OF_DAY, NANO_OF_SECOND, NANO_OF_DAY -> hours != null && minutes != null && seconds != null; + case OFFSET_SECONDS -> offset != null; + default -> false; + }; + } + + return field.isSupportedBy(this); + } + + @Override + public long getLong(TemporalField field) { + if (field instanceof ChronoField f) { + switch (f) { + case YEAR -> { + return years; + } + case MONTH_OF_YEAR -> { + return extractValue(f, months); + } + case DAY_OF_MONTH -> { + return extractValue(f, days); + } + case HOUR_OF_DAY -> { + return extractValue(f, hours); + } + case MINUTE_OF_HOUR -> { + return extractValue(f, minutes); + } + case SECOND_OF_MINUTE -> { + return extractValue(f, seconds); + } + case INSTANT_SECONDS -> { + if (isSupported(ChronoField.INSTANT_SECONDS) == false) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return LocalDateTime.of(years, months, days, hours, minutes, seconds) + .toEpochSecond(offset != null ? 
offset : ZoneOffset.UTC); + } + case SECOND_OF_DAY -> { + if (isSupported(ChronoField.SECOND_OF_DAY) == false) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return LocalTime.of(hours, minutes, seconds).toSecondOfDay(); + } + case NANO_OF_SECOND -> { + if (isSupported(ChronoField.NANO_OF_SECOND) == false) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return nanos != null ? nanos.longValue() : 0L; + } + case NANO_OF_DAY -> { + if (isSupported(ChronoField.NANO_OF_DAY) == false) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return LocalTime.of(hours, minutes, seconds, nanos != null ? nanos : 0).toNanoOfDay(); + } + case OFFSET_SECONDS -> { + if (offset == null) { + throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + return offset.getTotalSeconds(); + } + default -> throw new UnsupportedTemporalTypeException("No " + f + " value available"); + } + } + + return field.getFrom(this); + } + + private static long extractValue(ChronoField field, Number value) { + if (value == null) { + throw new UnsupportedTemporalTypeException("No " + field + " value available"); + } + return value.longValue(); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/Iso8601DateTimeParser.java b/server/src/main/java/org/elasticsearch/common/time/Iso8601DateTimeParser.java new file mode 100644 index 000000000000..2a526a36408c --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/Iso8601DateTimeParser.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.time; + +import java.time.ZoneId; +import java.time.format.DateTimeParseException; +import java.time.temporal.ChronoField; +import java.time.temporal.TemporalAccessor; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.Set; + +class Iso8601DateTimeParser implements DateTimeParser { + + private final Iso8601Parser parser; + private final ZoneId timezone; + // the locale doesn't actually matter, as we're parsing in a standardised format + // and we already account for . or , in decimals + private final Locale locale; + + Iso8601DateTimeParser(Set mandatoryFields, boolean optionalTime) { + parser = new Iso8601Parser(mandatoryFields, optionalTime, Map.of()); + timezone = null; + locale = null; + } + + private Iso8601DateTimeParser(Iso8601Parser parser, ZoneId timezone, Locale locale) { + this.parser = parser; + this.timezone = timezone; + this.locale = locale; + } + + @Override + public ZoneId getZone() { + return timezone; + } + + @Override + public Locale getLocale() { + return locale; + } + + @Override + public DateTimeParser withZone(ZoneId zone) { + return new Iso8601DateTimeParser(parser, zone, locale); + } + + @Override + public DateTimeParser withLocale(Locale locale) { + return new Iso8601DateTimeParser(parser, timezone, locale); + } + + Iso8601DateTimeParser withDefaults(Map defaults) { + return new Iso8601DateTimeParser(new Iso8601Parser(parser.mandatoryFields(), parser.optionalTime(), defaults), timezone, locale); + } + + @Override + public TemporalAccessor parse(CharSequence str) { + var result = parser.tryParse(str, timezone); + var temporal = result.result(); + if (temporal == null) { + throw new DateTimeParseException("Could not fully parse datetime", str, result.errorIndex()); + } + return temporal; + } + + @Override + public Optional tryParse(CharSequence str) { + return Optional.ofNullable(parser.tryParse(str, timezone).result()); + } +} diff --git 
a/server/src/main/java/org/elasticsearch/common/time/Iso8601Parser.java b/server/src/main/java/org/elasticsearch/common/time/Iso8601Parser.java new file mode 100644 index 000000000000..4f1d131dd8ce --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/Iso8601Parser.java @@ -0,0 +1,521 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.common.time; + +import org.elasticsearch.core.Nullable; + +import java.time.DateTimeException; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoField; +import java.util.EnumMap; +import java.util.EnumSet; +import java.util.Map; +import java.util.Set; + +/** + * Parses datetimes in ISO8601 format (and subsequences thereof). + *

+ * This is faster than the generic parsing in {@link java.time.format.DateTimeFormatter}, as this is hard-coded and specific to ISO-8601. + * Various public libraries provide their own variant of this mechanism. We use our own for a few reasons: + *

    + *
  • + * We are historically a bit more lenient with strings that are invalid according to the strict specification + * (eg using a zone region instead of offset for timezone) + *
  • + *
  • Various built-in formats specify some fields as mandatory and some as optional
  • + *
  • Callers can specify defaults for fields that are not present (eg for roundup parsers)
  • + *
+ * We also do not use exceptions here, instead returning {@code null} for any invalid values, that are then + * checked and propagated as appropriate. + */ +class Iso8601Parser { + + /** + * The result of the parse. If successful, {@code result} will be non-null. + * If parse failed, {@code errorIndex} specifies the index into the parsed string + * that the first invalid data was encountered. + */ + record Result(@Nullable DateTime result, int errorIndex) { + Result(DateTime result) { + this(result, -1); + } + + static Result error(int errorIndex) { + return new Result(null, errorIndex); + } + } + + private static final Set VALID_MANDATORY_FIELDS = EnumSet.of( + ChronoField.YEAR, + ChronoField.MONTH_OF_YEAR, + ChronoField.DAY_OF_MONTH, + ChronoField.HOUR_OF_DAY, + ChronoField.MINUTE_OF_HOUR, + ChronoField.SECOND_OF_MINUTE + ); + + private static final Set VALID_DEFAULT_FIELDS = EnumSet.of( + ChronoField.MONTH_OF_YEAR, + ChronoField.DAY_OF_MONTH, + ChronoField.HOUR_OF_DAY, + ChronoField.MINUTE_OF_HOUR, + ChronoField.SECOND_OF_MINUTE, + ChronoField.NANO_OF_SECOND + ); + + private final Set mandatoryFields; + private final boolean optionalTime; + private final Map defaults; + + /** + * Constructs a new {@code Iso8601Parser} object + * + * @param mandatoryFields + * The set of fields that must be present for a valid parse. These should be specified in field order + * (eg if {@link ChronoField#DAY_OF_MONTH} is specified, {@link ChronoField#MONTH_OF_YEAR} should also be specified). + * {@link ChronoField#YEAR} is always mandatory. + * @param optionalTime + * {@code false} if the presence of time fields follows {@code mandatoryFields}, + * {@code true} if a time component is always optional, despite the presence of time fields in {@code mandatoryFields}. 
+ * This makes it possible to specify 'time is optional, but if it is present, it must have these fields' + * by settings {@code optionalTime = true} and putting time fields such as {@link ChronoField#HOUR_OF_DAY} + * and {@link ChronoField#MINUTE_OF_HOUR} in {@code mandatoryFields}. + * @param defaults + * Map of default field values, if they are not present in the parsed string. + */ + Iso8601Parser(Set mandatoryFields, boolean optionalTime, Map defaults) { + checkChronoFields(mandatoryFields, VALID_MANDATORY_FIELDS); + checkChronoFields(defaults.keySet(), VALID_DEFAULT_FIELDS); + + this.mandatoryFields = EnumSet.of(ChronoField.YEAR); // year is always mandatory + this.mandatoryFields.addAll(mandatoryFields); + this.optionalTime = optionalTime; + this.defaults = defaults.isEmpty() ? Map.of() : new EnumMap<>(defaults); + } + + private static void checkChronoFields(Set fields, Set validFields) { + if (fields.isEmpty()) return; // nothing to check + + fields = EnumSet.copyOf(fields); + fields.removeAll(validFields); + if (fields.isEmpty() == false) { + throw new IllegalArgumentException("Invalid chrono fields specified " + fields); + } + } + + boolean optionalTime() { + return optionalTime; + } + + Set mandatoryFields() { + return mandatoryFields; + } + + private boolean isOptional(ChronoField field) { + return mandatoryFields.contains(field) == false; + } + + private Integer defaultZero(ChronoField field) { + return defaults.getOrDefault(field, 0); + } + + /** + * Attempts to parse {@code str} as an ISO-8601 datetime, returning a {@link Result} indicating if the parse + * was successful or not, and what fields were present. + * @param str The string to parse + * @param defaultTimezone The default timezone to return, if no timezone is present in the string + * @return The {@link Result} of the parse. + */ + Result tryParse(CharSequence str, @Nullable ZoneId defaultTimezone) { + if (str.charAt(0) == '-') { + // the year is negative. This is most unusual. 
+ // Instead of always adding offsets and dynamically calculating position in the main parser code below, + // just in case it starts with a -, just parse the substring, then adjust the output appropriately + Result result = parse(new CharSubSequence(str, 1, str.length()), defaultTimezone); + + if (result.errorIndex() >= 0) { + return Result.error(result.errorIndex() + 1); + } else { + DateTime dt = result.result(); + return new Result( + new DateTime( + -dt.years(), + dt.months(), + dt.days(), + dt.hours(), + dt.minutes(), + dt.seconds(), + dt.nanos(), + dt.zoneId(), + dt.offset() + ) + ); + } + } else { + return parse(str, defaultTimezone); + } + } + + /** + * Index {@code i} is the multiplicand to get the number of nanos from the fractional second with {@code i=9-d} digits. + */ + private static final int[] NANO_MULTIPLICANDS = new int[] { 1, 10, 100, 1_000, 10_000, 100_000, 1_000_000, 10_000_000, 100_000_000 }; + + /** + * Parses {@code str} in ISO8601 format. + *

+ * This parses the string using fixed offsets (it does not support variable-width fields) and separators, + * sequentially parsing each field and looking for the correct separator. + * This enables it to be very fast, as all the fields are in fixed places in the string. + * The only variable aspect comes from the timezone, which (fortunately) is only present at the end of the string, + * at any point after a time field. + * It also does not use exceptions, instead returning {@code null} where a value cannot be parsed. + */ + private Result parse(CharSequence str, @Nullable ZoneId defaultTimezone) { + int len = str.length(); + + // YEARS + Integer years = parseInt(str, 0, 4); + if (years == null) return Result.error(0); + if (len == 4) { + return isOptional(ChronoField.MONTH_OF_YEAR) + ? new Result( + withZoneOffset( + years, + defaults.get(ChronoField.MONTH_OF_YEAR), + defaults.get(ChronoField.DAY_OF_MONTH), + defaults.get(ChronoField.HOUR_OF_DAY), + defaults.get(ChronoField.MINUTE_OF_HOUR), + defaults.get(ChronoField.SECOND_OF_MINUTE), + defaults.get(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(4); + } + + if (str.charAt(4) != '-') return Result.error(4); + + // MONTHS + Integer months = parseInt(str, 5, 7); + if (months == null || months > 12) return Result.error(5); + if (len == 7) { + return isOptional(ChronoField.DAY_OF_MONTH) + ? new Result( + withZoneOffset( + years, + months, + defaults.get(ChronoField.DAY_OF_MONTH), + defaults.get(ChronoField.HOUR_OF_DAY), + defaults.get(ChronoField.MINUTE_OF_HOUR), + defaults.get(ChronoField.SECOND_OF_MINUTE), + defaults.get(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(7); + } + + if (str.charAt(7) != '-') return Result.error(7); + + // DAYS + Integer days = parseInt(str, 8, 10); + if (days == null || days > 31) return Result.error(8); + if (len == 10) { + return optionalTime || isOptional(ChronoField.HOUR_OF_DAY) + ? 
new Result( + withZoneOffset( + years, + months, + days, + defaults.get(ChronoField.HOUR_OF_DAY), + defaults.get(ChronoField.MINUTE_OF_HOUR), + defaults.get(ChronoField.SECOND_OF_MINUTE), + defaults.get(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(10); + } + + if (str.charAt(10) != 'T') return Result.error(10); + if (len == 11) { + return isOptional(ChronoField.HOUR_OF_DAY) + ? new Result( + withZoneOffset( + years, + months, + days, + defaults.get(ChronoField.HOUR_OF_DAY), + defaults.get(ChronoField.MINUTE_OF_HOUR), + defaults.get(ChronoField.SECOND_OF_MINUTE), + defaults.get(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(11); + } + + // HOURS + timezone + Integer hours = parseInt(str, 11, 13); + if (hours == null || hours > 23) return Result.error(11); + if (len == 13) { + return isOptional(ChronoField.MINUTE_OF_HOUR) + ? new Result( + withZoneOffset( + years, + months, + days, + hours, + defaultZero(ChronoField.MINUTE_OF_HOUR), + defaultZero(ChronoField.SECOND_OF_MINUTE), + defaultZero(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(13); + } + if (isZoneId(str, 13)) { + ZoneId timezone = parseZoneId(str, 13); + return timezone != null && isOptional(ChronoField.MINUTE_OF_HOUR) + ? new Result( + withZoneOffset( + years, + months, + days, + hours, + defaultZero(ChronoField.MINUTE_OF_HOUR), + defaultZero(ChronoField.SECOND_OF_MINUTE), + defaultZero(ChronoField.NANO_OF_SECOND), + timezone + ) + ) + : Result.error(13); + } + + if (str.charAt(13) != ':') return Result.error(13); + + // MINUTES + timezone + Integer minutes = parseInt(str, 14, 16); + if (minutes == null || minutes > 59) return Result.error(14); + if (len == 16) { + return isOptional(ChronoField.SECOND_OF_MINUTE) + ? 
new Result( + withZoneOffset( + years, + months, + days, + hours, + minutes, + defaultZero(ChronoField.SECOND_OF_MINUTE), + defaultZero(ChronoField.NANO_OF_SECOND), + defaultTimezone + ) + ) + : Result.error(16); + } + if (isZoneId(str, 16)) { + ZoneId timezone = parseZoneId(str, 16); + return timezone != null && isOptional(ChronoField.SECOND_OF_MINUTE) + ? new Result( + withZoneOffset( + years, + months, + days, + hours, + minutes, + defaultZero(ChronoField.SECOND_OF_MINUTE), + defaultZero(ChronoField.NANO_OF_SECOND), + timezone + ) + ) + : Result.error(16); + } + + if (str.charAt(16) != ':') return Result.error(16); + + // SECONDS + timezone + Integer seconds = parseInt(str, 17, 19); + if (seconds == null || seconds > 59) return Result.error(17); + if (len == 19) { + return new Result( + withZoneOffset(years, months, days, hours, minutes, seconds, defaultZero(ChronoField.NANO_OF_SECOND), defaultTimezone) + ); + } + if (isZoneId(str, 19)) { + ZoneId timezone = parseZoneId(str, 19); + return timezone != null + ? new Result( + withZoneOffset(years, months, days, hours, minutes, seconds, defaultZero(ChronoField.NANO_OF_SECOND), timezone) + ) + : Result.error(19); + } + + char decSeparator = str.charAt(19); + if (decSeparator != '.' 
&& decSeparator != ',') return Result.error(19); + + // NANOS + timezone + // nanos are always optional + // the last number could be millis or nanos, or any combination in the middle + // so we keep parsing numbers until we get to not a number + int nanos = 0; + int pos; + for (pos = 20; pos < len && pos < 29; pos++) { + char c = str.charAt(pos); + if (c < ZERO || c > NINE) break; + nanos = nanos * 10 + (c - ZERO); + } + + if (pos == 20) return Result.error(20); // didn't find a number at all + + // multiply it by the correct multiplicand to get the nanos + nanos *= NANO_MULTIPLICANDS[29 - pos]; + + if (len == pos) { + return new Result(withZoneOffset(years, months, days, hours, minutes, seconds, nanos, defaultTimezone)); + } + if (isZoneId(str, pos)) { + ZoneId timezone = parseZoneId(str, pos); + return timezone != null + ? new Result(withZoneOffset(years, months, days, hours, minutes, seconds, nanos, timezone)) + : Result.error(pos); + } + + // still chars left at the end - string is not valid + return Result.error(pos); + } + + private static boolean isZoneId(CharSequence str, int pos) { + // all region zoneIds must start with [A-Za-z] (see ZoneId#of) + // this also covers Z and UT/UTC/GMT zone variants + char c = str.charAt(pos); + return c == '+' || c == '-' || (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z'); + } + + /** + * This parses the zone offset, which is of the format accepted by {@link java.time.ZoneId#of(String)}. + * It has fast paths for numerical offsets, but falls back on {@code ZoneId.of} for non-trivial zone ids. 
+ */ + private ZoneId parseZoneId(CharSequence str, int pos) { + int len = str.length(); + char first = str.charAt(pos); + + if (first == 'Z' && len == pos + 1) { + return ZoneOffset.UTC; + } + + boolean positive; + switch (first) { + case '+' -> positive = true; + case '-' -> positive = false; + default -> { + // non-trivial zone offset, fallback on the built-in java zoneid parser + try { + return ZoneId.of(str.subSequence(pos, str.length()).toString()); + } catch (DateTimeException e) { + return null; + } + } + } + pos++; // read the + or - + + Integer hours = parseInt(str, pos, pos += 2); + if (hours == null) return null; + if (len == pos) return ofHoursMinutesSeconds(hours, 0, 0, positive); + + boolean hasColon = false; + if (str.charAt(pos) == ':') { + pos++; + hasColon = true; + } + + Integer minutes = parseInt(str, pos, pos += 2); + if (minutes == null) return null; + if (len == pos) return ofHoursMinutesSeconds(hours, minutes, 0, positive); + + // either both dividers have a colon, or neither do + if ((str.charAt(pos) == ':') != hasColon) return null; + if (hasColon) { + pos++; + } + + Integer seconds = parseInt(str, pos, pos += 2); + if (seconds == null) return null; + if (len == pos) return ofHoursMinutesSeconds(hours, minutes, seconds, positive); + + // there's some text left over... + return null; + } + + /* + * ZoneOffset.ofTotalSeconds has a ConcurrentHashMap cache of offsets. This is fine, + * but it does mean there's an expensive map lookup every time we call ofTotalSeconds. 
+ * There's no way to get round that, but we can at least have a very quick last-value cache here + * to avoid doing a full map lookup when there's lots of timestamps with the same offset being parsed + */ + private final ThreadLocal lastOffset = ThreadLocal.withInitial(() -> ZoneOffset.UTC); + + private ZoneOffset ofHoursMinutesSeconds(int hours, int minutes, int seconds, boolean positive) { + int totalSeconds = hours * 3600 + minutes * 60 + seconds; + if (positive == false) { + totalSeconds = -totalSeconds; + } + + // check the lastOffset value + ZoneOffset lastOffset = this.lastOffset.get(); + if (totalSeconds == lastOffset.getTotalSeconds()) { + return lastOffset; + } + + try { + ZoneOffset offset = ZoneOffset.ofTotalSeconds(totalSeconds); + this.lastOffset.set(lastOffset); + return offset; + } catch (DateTimeException e) { + // zoneoffset is out of range + return null; + } + } + + /** + * Create a {@code DateTime} object, with the ZoneOffset field set when the zone is an offset, not just an id. 
+ */ + private static DateTime withZoneOffset( + int years, + Integer months, + Integer days, + Integer hours, + Integer minutes, + Integer seconds, + Integer nanos, + ZoneId zoneId + ) { + if (zoneId instanceof ZoneOffset zo) { + return new DateTime(years, months, days, hours, minutes, seconds, nanos, zoneId, zo); + } else { + return new DateTime(years, months, days, hours, minutes, seconds, nanos, zoneId, null); + } + } + + private static final char ZERO = '0'; + private static final char NINE = '9'; + + private static Integer parseInt(CharSequence str, int startInclusive, int endExclusive) { + if (str.length() < endExclusive) return null; + + int result = 0; + for (int i = startInclusive; i < endExclusive; i++) { + char c = str.charAt(i); + if (c < ZERO || c > NINE) return null; + result = result * 10 + (c - ZERO); + } + return result; + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java index 9c39ee51276d..707b07c1d68d 100644 --- a/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateFormatter.java @@ -21,15 +21,21 @@ import java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Objects; import java.util.function.UnaryOperator; +import static java.util.Map.entry; + class JavaDateFormatter implements DateFormatter { @SuppressWarnings("unchecked") private static T defaultRoundUp(T parser) { if (parser instanceof JavaTimeDateTimeParser jtp) { return (T) defaultRoundUp(jtp); } + if (parser instanceof Iso8601DateTimeParser iso) { + return (T) defaultRoundUp(iso); + } throw new IllegalArgumentException("Unknown parser implementation " + parser.getClass()); } @@ -78,6 +84,19 @@ private static JavaTimeDateTimeParser defaultRoundUp(JavaTimeDateTimeParser pars return new 
JavaTimeDateTimeParser(builder.toFormatter(parser.getLocale())); } + private static Iso8601DateTimeParser defaultRoundUp(Iso8601DateTimeParser parser) { + return parser.withDefaults( + Map.ofEntries( + entry(ChronoField.MONTH_OF_YEAR, 1), + entry(ChronoField.DAY_OF_MONTH, 1), + entry(ChronoField.HOUR_OF_DAY, 23), + entry(ChronoField.MINUTE_OF_HOUR, 59), + entry(ChronoField.SECOND_OF_MINUTE, 59), + entry(ChronoField.NANO_OF_SECOND, 999_999_999) + ) + ); + } + private final String format; private final DateTimePrinter printer; private final DateTimeParser[] parsers; diff --git a/server/src/test/java/org/elasticsearch/common/time/Iso8601ParserTests.java b/server/src/test/java/org/elasticsearch/common/time/Iso8601ParserTests.java new file mode 100644 index 000000000000..bfb03ea9496e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/time/Iso8601ParserTests.java @@ -0,0 +1,427 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.common.time; + +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matcher; + +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.format.DateTimeParseException; +import java.time.format.ResolverStyle; +import java.time.format.SignStyle; +import java.time.temporal.ChronoField; +import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalQueries; +import java.time.temporal.ValueRange; +import java.util.Locale; +import java.util.Map; +import java.util.Set; + +import static java.time.temporal.ChronoField.DAY_OF_MONTH; +import static java.time.temporal.ChronoField.HOUR_OF_DAY; +import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; +import static java.time.temporal.ChronoField.MONTH_OF_YEAR; +import static java.time.temporal.ChronoField.NANO_OF_SECOND; +import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; +import static java.time.temporal.ChronoField.YEAR; +import static org.elasticsearch.test.LambdaMatchers.transformedMatch; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +public class Iso8601ParserTests extends ESTestCase { + + private static Iso8601Parser defaultParser() { + return new Iso8601Parser(Set.of(), true, Map.of()); + } + + private static Matcher hasResult(DateTime dateTime) { + return transformedMatch(Iso8601Parser.Result::result, equalTo(dateTime)); + } + + private static Matcher hasError(int parseError) { + return transformedMatch(Iso8601Parser.Result::errorIndex, equalTo(parseError)); + } + + public void testStrangeParses() { + assertThat(defaultParser().tryParse("-9999-01-01", null), hasResult(new DateTime(-9999, 1, 1, null, null, null, null, null, null))); + assertThat(defaultParser().tryParse("1000", null), 
hasResult(new DateTime(1000, null, null, null, null, null, null, null, null))); + assertThat(defaultParser().tryParse("2023-02-02T", null), hasResult(new DateTime(2023, 2, 2, null, null, null, null, null, null))); + + // these are accepted by the previous formatters, but are not valid ISO8601 + assertThat(defaultParser().tryParse("2023-01-01T12:00:00.01,02", null), hasError(22)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00Europe/Paris+0400", null), hasError(19)); + } + + public void testOutOfRange() { + assertThat(defaultParser().tryParse("2023-13-12", null), hasError(5)); + assertThat(defaultParser().tryParse("2023-12-32", null), hasError(8)); + assertThat(defaultParser().tryParse("2023-12-31T24", null), hasError(11)); + assertThat(defaultParser().tryParse("2023-12-31T23:60", null), hasError(14)); + assertThat(defaultParser().tryParse("2023-12-31T23:59:60", null), hasError(17)); + assertThat(defaultParser().tryParse("2023-12-31T23:59:59+18:30", null), hasError(19)); + } + + public void testMandatoryFields() { + assertThat( + new Iso8601Parser(Set.of(YEAR), true, Map.of()).tryParse("2023", null), + hasResult(new DateTime(2023, null, null, null, null, null, null, null, null)) + ); + assertThat(new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR), true, Map.of()).tryParse("2023", null), hasError(4)); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR), true, Map.of()).tryParse("2023-06", null), + hasResult(new DateTime(2023, 6, null, null, null, null, null, null, null)) + ); + assertThat(new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH), true, Map.of()).tryParse("2023-06", null), hasError(7)); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH), true, Map.of()).tryParse("2023-06-20", null), + hasResult(new DateTime(2023, 6, 20, null, null, null, null, null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY), false, Map.of()).tryParse("2023-06-20", null), + 
hasError(10) + ); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY), false, Map.of()).tryParse("2023-06-20T15", null), + hasResult(new DateTime(2023, 6, 20, 15, 0, 0, 0, null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR), false, Map.of()).tryParse( + "2023-06-20T15", + null + ), + hasError(13) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR), false, Map.of()).tryParse( + "2023-06-20T15Z", + null + ), + hasError(13) + ); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR), false, Map.of()).tryParse( + "2023-06-20T15:48", + null + ), + hasResult(new DateTime(2023, 6, 20, 15, 48, 0, 0, null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE), false, Map.of()) + .tryParse("2023-06-20T15:48", null), + hasError(16) + ); + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE), false, Map.of()) + .tryParse("2023-06-20T15:48Z", null), + hasError(16) + ); + + assertThat( + new Iso8601Parser(Set.of(YEAR, MONTH_OF_YEAR, DAY_OF_MONTH, HOUR_OF_DAY, MINUTE_OF_HOUR, SECOND_OF_MINUTE), false, Map.of()) + .tryParse("2023-06-20T15:48:09", null), + hasResult(new DateTime(2023, 6, 20, 15, 48, 9, 0, null, null)) + ); + } + + public void testParseNanos() { + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.5", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 500_000_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,5", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 500_000_000, null, null)) + ); + + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.05", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 50_000_000, null, null)) + ); + 
assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 5_000_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.0005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 500_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,00005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 50_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.000005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 5_000, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,0000005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 500, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00.00000005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 50, null, null)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00,000000005", null), + hasResult(new DateTime(2023, 1, 1, 12, 0, 0, 5, null, null)) + ); + + // too many nanos + assertThat(defaultParser().tryParse("2023-01-01T12:00:00.0000000005", null), hasError(29)); + } + + private static Matcher hasTimezone(ZoneId offset) { + return transformedMatch(r -> r.result().query(TemporalQueries.zone()), equalTo(offset)); + } + + public void testParseTimezones() { + // using defaults + assertThat(defaultParser().tryParse("2023-01-01T12:00:00", null), hasTimezone(null)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00", ZoneOffset.UTC), hasTimezone(ZoneOffset.UTC)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00", ZoneOffset.ofHours(-3)), hasTimezone(ZoneOffset.ofHours(-3))); + + // timezone specified + assertThat(defaultParser().tryParse("2023-01-01T12:00:00Z", null), hasTimezone(ZoneOffset.UTC)); + + assertThat(defaultParser().tryParse("2023-01-01T12:00:00-05", null), hasTimezone(ZoneOffset.ofHours(-5))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+11", 
null), hasTimezone(ZoneOffset.ofHours(11))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+0830", null), hasTimezone(ZoneOffset.ofHoursMinutes(8, 30))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00-0415", null), hasTimezone(ZoneOffset.ofHoursMinutes(-4, -15))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+08:30", null), hasTimezone(ZoneOffset.ofHoursMinutes(8, 30))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00-04:15", null), hasTimezone(ZoneOffset.ofHoursMinutes(-4, -15))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+011030", null), hasTimezone(ZoneOffset.ofHoursMinutesSeconds(1, 10, 30))); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00-074520", null), + hasTimezone(ZoneOffset.ofHoursMinutesSeconds(-7, -45, -20)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00+01:10:30", null), + hasTimezone(ZoneOffset.ofHoursMinutesSeconds(1, 10, 30)) + ); + assertThat( + defaultParser().tryParse("2023-01-01T12:00:00-07:45:20", null), + hasTimezone(ZoneOffset.ofHoursMinutesSeconds(-7, -45, -20)) + ); + + assertThat(defaultParser().tryParse("2023-01-01T12:00:00GMT", null), hasTimezone(ZoneId.of("GMT"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00UTC", null), hasTimezone(ZoneId.of("UTC"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00UT", null), hasTimezone(ZoneId.of("UT"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00GMT+3", null), hasTimezone(ZoneId.of("GMT+3"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00UTC-4", null), hasTimezone(ZoneId.of("UTC-4"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00UT+6", null), hasTimezone(ZoneId.of("UT+6"))); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00Europe/Paris", null), hasTimezone(ZoneId.of("Europe/Paris"))); + + // we could be more specific in the error index for invalid timezones, + // but that would require keeping track & propagating Result objects 
within date-time parsing just for the ZoneId + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+04:0030", null), hasError(19)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00+0400:30", null), hasError(19)); + assertThat(defaultParser().tryParse("2023-01-01T12:00:00Invalid", null), hasError(19)); + } + + private static void assertEquivalent(String text, DateTimeFormatter formatter) { + TemporalAccessor expected = formatter.parse(text); + TemporalAccessor actual = defaultParser().tryParse(text, null).result(); + assertThat(actual, is(notNullValue())); + + assertThat(actual.query(TemporalQueries.localDate()), equalTo(expected.query(TemporalQueries.localDate()))); + assertThat(actual.query(TemporalQueries.localTime()), equalTo(expected.query(TemporalQueries.localTime()))); + assertThat(actual.query(TemporalQueries.zone()), equalTo(expected.query(TemporalQueries.zone()))); + } + + private static void assertEquivalentFailure(String text, DateTimeFormatter formatter) { + DateTimeParseException expected = expectThrows(DateTimeParseException.class, () -> formatter.parse(text)); + int error = defaultParser().tryParse(text, null).errorIndex(); + assertThat(error, greaterThanOrEqualTo(0)); + + assertThat(error, equalTo(expected.getErrorIndex())); + } + + public void testEquivalence() { + // test that Iso8601Parser produces the same output as DateTimeFormatter + DateTimeFormatter mandatoryFormatter = new DateTimeFormatterBuilder().append(DateTimeFormatter.ISO_LOCAL_DATE_TIME) + .optionalStart() + .appendZoneOrOffsetId() + .optionalEnd() + .optionalStart() + .appendOffset("+HHmm", "Z") + .optionalEnd() + .toFormatter(Locale.ROOT) + .withResolverStyle(ResolverStyle.STRICT); + + // just checking timezones/ids here + assertEquivalent("2023-01-01T12:00:00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00Z", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00UT", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00UTC", 
mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00GMT", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00-00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+05", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+0500", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+05:00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00+05:00:30", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00-07", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00-0715", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00-07:15", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00UTC+05:00", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00GMT-09:45:30", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00Zulu", mandatoryFormatter); + assertEquivalent("2023-01-01T12:00:00Europe/Paris", mandatoryFormatter); + + assertEquivalentFailure("2023-01-01T12:00:00+5", mandatoryFormatter); + assertEquivalentFailure("2023-01-01T12:00:00-7", mandatoryFormatter); + assertEquivalentFailure("2023-01-01T12:00:00InvalidTimeZone", mandatoryFormatter); + + DateTimeFormatter allFieldsOptional = new DateTimeFormatterBuilder().appendValue(YEAR, 4, 4, SignStyle.EXCEEDS_PAD) + .optionalStart() + .appendLiteral('-') + .appendValue(MONTH_OF_YEAR, 2) + .optionalStart() + .appendLiteral('-') + .appendValue(DAY_OF_MONTH, 2) + .optionalStart() + .appendLiteral('T') + .appendValue(HOUR_OF_DAY, 2) + .optionalStart() + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2) + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalEnd() + .optionalStart() + .appendZoneOrOffsetId() + .optionalEnd() + .optionalStart() + .appendOffset("+HHmm", "Z") + .optionalEnd() + .toFormatter(Locale.ROOT) + .withResolverStyle(ResolverStyle.STRICT); + + 
assertEquivalent("2023", allFieldsOptional); + assertEquivalent("2023-04", allFieldsOptional); + assertEquivalent("2023-04-08", allFieldsOptional); + assertEquivalent("2023-04-08T13", allFieldsOptional); + assertEquivalent("2023-04-08T13:45", allFieldsOptional); + assertEquivalent("2023-04-08T13:45:50", allFieldsOptional); + assertEquivalent("-2023-04-08T13:45:50", allFieldsOptional); + } + + private static int randomValue(ValueRange range) { + assert range.isIntValue(); + return randomIntBetween((int) range.getMinimum(), (int) range.getMaximum()); + } + + public void testDefaults() { + Map defaults = Map.of( + MONTH_OF_YEAR, + randomValue(MONTH_OF_YEAR.range()), + DAY_OF_MONTH, + randomValue(DAY_OF_MONTH.range()), + HOUR_OF_DAY, + randomValue(HOUR_OF_DAY.range()), + MINUTE_OF_HOUR, + randomValue(MINUTE_OF_HOUR.range()), + SECOND_OF_MINUTE, + randomValue(SECOND_OF_MINUTE.range()), + NANO_OF_SECOND, + randomValue(NANO_OF_SECOND.range()) + ); + + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023", null), + hasResult( + new DateTime( + 2023, + defaults.get(MONTH_OF_YEAR), + defaults.get(DAY_OF_MONTH), + defaults.get(HOUR_OF_DAY), + defaults.get(MINUTE_OF_HOUR), + defaults.get(SECOND_OF_MINUTE), + defaults.get(NANO_OF_SECOND), + null, + null + ) + ) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01", null), + hasResult( + new DateTime( + 2023, + 1, + defaults.get(DAY_OF_MONTH), + defaults.get(HOUR_OF_DAY), + defaults.get(MINUTE_OF_HOUR), + defaults.get(SECOND_OF_MINUTE), + defaults.get(NANO_OF_SECOND), + null, + null + ) + ) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01", null), + hasResult( + new DateTime( + 2023, + 1, + 1, + defaults.get(HOUR_OF_DAY), + defaults.get(MINUTE_OF_HOUR), + defaults.get(SECOND_OF_MINUTE), + defaults.get(NANO_OF_SECOND), + null, + null + ) + ) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01T00", null), + 
hasResult( + new DateTime( + 2023, + 1, + 1, + 0, + defaults.get(MINUTE_OF_HOUR), + defaults.get(SECOND_OF_MINUTE), + defaults.get(NANO_OF_SECOND), + null, + null + ) + ) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01T00:00", null), + hasResult(new DateTime(2023, 1, 1, 0, 0, defaults.get(SECOND_OF_MINUTE), defaults.get(NANO_OF_SECOND), null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01T00:00:00", null), + hasResult(new DateTime(2023, 1, 1, 0, 0, 0, defaults.get(NANO_OF_SECOND), null, null)) + ); + assertThat( + new Iso8601Parser(Set.of(), true, defaults).tryParse("2023-01-01T00:00:00.0", null), + hasResult(new DateTime(2023, 1, 1, 0, 0, 0, 0, null, null)) + ); + } +}