From 6e15229f6e118d07a4917e4fad9871c5ed0fe840 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Fri, 2 Feb 2024 15:56:07 +0100 Subject: [PATCH 001/106] Make counted terms agg visible to profiling (#105049) The counted-terms aggregation is defined in its own plugin. When other plugins (such as the profiling plugin) want to use this aggregation, this leads to class loader issues, such as the aggregation class not being recognized. By moving just the aggregation code itself to the server module but keeping everything else (including registration) in the `mapper-counted-keyword` module, the counted-terms aggregation can also be used from other plugins. --- server/src/main/java/module-info.java | 1 + .../CountedTermsAggregationBuilder.java | 11 ++--- .../countedterms}/CountedTermsAggregator.java | 7 +-- .../CountedTermsAggregatorFactory.java | 7 +-- .../CountedTermsAggregatorSupplier.java | 7 +-- .../CountedKeywordMapperPlugin.java | 1 + .../CountedTermsAggregationBuilderTests.java | 1 + .../CountedTermsAggregatorTests.java | 1 + .../TransportGetStackTracesAction.java | 2 +- .../rest-api-spec/test/profiling/10_basic.yml | 44 ++++++++++++++++++- 10 files changed, 66 insertions(+), 16 deletions(-) rename {x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword => server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms}/CountedTermsAggregationBuilder.java (93%) rename {x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword => server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms}/CountedTermsAggregator.java (96%) rename {x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword => server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms}/CountedTermsAggregatorFactory.java (95%) rename {x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword => server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms}/CountedTermsAggregatorSupplier.java (81%) diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index eddc96764273c..4bc5d95f06896 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -325,6 +325,7 @@ exports org.elasticsearch.search.aggregations; exports org.elasticsearch.search.aggregations.bucket; exports org.elasticsearch.search.aggregations.bucket.composite; + exports org.elasticsearch.search.aggregations.bucket.countedterms; exports org.elasticsearch.search.aggregations.bucket.filter; exports org.elasticsearch.search.aggregations.bucket.geogrid; exports org.elasticsearch.search.aggregations.bucket.global; diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregationBuilder.java similarity index 93% rename from x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregationBuilder.java index 31be7f149831d..4f71c964ebaf9 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregationBuilder.java +++
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregationBuilder.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.countedkeyword; +package org.elasticsearch.search.aggregations.bucket.countedterms; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; @@ -56,7 +57,7 @@ public CountedTermsAggregationBuilder(String name) { super(name); } - protected CountedTermsAggregationBuilder( + public CountedTermsAggregationBuilder( ValuesSourceAggregationBuilder clone, AggregatorFactories.Builder factoriesBuilder, Map metadata @@ -64,7 +65,7 @@ protected CountedTermsAggregationBuilder( super(clone, factoriesBuilder, metadata); } - protected CountedTermsAggregationBuilder(StreamInput in) throws IOException { + public CountedTermsAggregationBuilder(StreamInput in) throws IOException { super(in); bucketCountThresholds = new TermsAggregator.BucketCountThresholds(in); } diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java similarity index 96% rename from x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java index 5e1b1e3624f00..588c53a2d1463 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
*/ -package org.elasticsearch.xpack.countedkeyword; +package org.elasticsearch.search.aggregations.bucket.countedterms; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregatorFactory.java similarity index 95% rename from x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorFactory.java rename to server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregatorFactory.java index 3b8be76f14da8..430e28e96d5ee 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregatorFactory.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.countedkeyword; +package org.elasticsearch.search.aggregations.bucket.countedterms; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.rest.RestStatus; diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorSupplier.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregatorSupplier.java similarity index 81% rename from x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorSupplier.java rename to server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregatorSupplier.java index 2817863f6b42c..979c99018e969 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorSupplier.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregatorSupplier.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
*/ -package org.elasticsearch.xpack.countedkeyword; +package org.elasticsearch.search.aggregations.bucket.countedterms; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordMapperPlugin.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordMapperPlugin.java index 62fb10be05f9d..43610ecede072 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordMapperPlugin.java +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordMapperPlugin.java @@ -11,6 +11,7 @@ import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.search.aggregations.bucket.countedterms.CountedTermsAggregationBuilder; import java.util.ArrayList; import java.util.Collections; diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregationBuilderTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregationBuilderTests.java index ba266e82fecc8..00740d8a0bd20 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregationBuilderTests.java +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregationBuilderTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; +import org.elasticsearch.search.aggregations.bucket.countedterms.CountedTermsAggregationBuilder; import java.util.Collection; import java.util.Collections; diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java index 02d629c7604ac..ef11c7dd3e9d9 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedTermsAggregatorTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.index.mapper.TestDocumentParserContext; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.bucket.countedterms.CountedTermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.InternalTerms; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.xcontent.XContentParser; diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index 7fac24a094aae..567c36e6b4404 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -31,6 
+31,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregation; +import org.elasticsearch.search.aggregations.bucket.countedterms.CountedTermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.sampler.random.RandomSamplerAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; @@ -48,7 +49,6 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.ObjectPath; -import org.elasticsearch.xpack.countedkeyword.CountedTermsAggregationBuilder; import java.time.Duration; import java.time.Instant; diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml index 83522b05bf25d..684d554f08e58 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/profiling/10_basic.yml @@ -15,6 +15,18 @@ setup: wait_for_resources_created: true timeout: "1m" + - do: + indices.create: + index: test-events + body: + mappings: + properties: + "@timestamp": + type: date + format: epoch_second + events: + type: counted_keyword + - do: bulk: refresh: wait_for @@ -105,6 +117,8 @@ setup: - {"@timestamp": "1698624000", "Executable": {"build": {"id": "c5f89ea1c68710d2a493bb604c343a92c4f8ddeb"}, "file": {"name": "vmlinux"}}, "Symbolization": {"next_time": "4852491791"}, "ecs": {"version": "1.12.0"}} - {"create": {"_index": "profiling-hosts", "_id": "eLH27YsBj2lLi3tJYlvr"}} - {"profiling.project.id": 100, "host.id": "8457605156473051743", "@timestamp": 1700504426, "ecs.version": "1.12.0", "profiling.agent.build_timestamp": 1688111067, "profiling.instance.private_ipv4s": ["192.168.1.2"], "ec2.instance_life_cycle": "on-demand", "profiling.agent.config.map_scale_factor": 0, "ec2.instance_type": "i3.2xlarge", "profiling.host.ip": "192.168.1.2", "profiling.agent.config.bpf_log_level": 0, "profiling.host.sysctl.net.core.bpf_jit_enable": 1, "profiling.agent.config.file": "/etc/prodfiler/prodfiler.conf", "ec2.local_ipv4": "192.168.1.2", "profiling.agent.config.no_kernel_version_check": false, "profiling.host.machine": "x86_64", "profiling.host.tags": ["cloud_provider:aws", "cloud_environment:qa", "cloud_region:eu-west-1"], "profiling.agent.config.probabilistic_threshold": 100, "profiling.agent.config.disable_tls": false, "profiling.agent.config.tracers": "all", "profiling.agent.start_time": 1700090045589, "profiling.agent.config.max_elements_per_interval": 800, "ec2.placement.region": "eu-west-1", "profiling.agent.config.present_cpu_cores": 8, "profiling.host.kernel_version": "9.9.9-0-aws", "profiling.agent.config.bpf_log_size": 65536, "profiling.agent.config.known_traces_entries": 65536, "profiling.host.sysctl.kernel.unprivileged_bpf_disabled": 1, "profiling.agent.config.verbose": false, "profiling.agent.config.probabilistic_interval": "1m0s", "ec2.placement.availability_zone_id": "euw1-az1", "ec2.security_groups": "", "ec2.local_hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "ec2.placement.availability_zone": "eu-west-1c", "profiling.agent.config.upload_symbols": false, "profiling.host.sysctl.kernel.bpf_stats_enabled": 0, "profiling.host.name": "ip-192-168-1-2", "ec2.mac": 
"00:11:22:33:44:55", "profiling.host.kernel_proc_version": "Linux version 9.9.9-0-aws", "profiling.agent.config.cache_directory": "/var/cache/optimyze/", "profiling.agent.version": "v8.12.0", "ec2.hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "profiling.agent.config.elastic_mode": false, "ec2.ami_id": "ami-aaaaaaaaaaa", "ec2.instance_id": "i-0b999999999999999" } + - {"index": {"_index": "test-events"}} + - {"@timestamp": "1700504427", "events": ["S07KmaoGhvNte78xwwRbZQ"]} --- teardown: - do: @@ -151,12 +165,40 @@ teardown: - match: { stack_traces.S07KmaoGhvNte78xwwRbZQ.count: 1} --- -"Test flamegraph": +"Test flamegraph from profiling-events": + - do: + profiling.flamegraph: + body: > + { + "sample_size": 20000, + "requested_duration": 86400, + "query": { + "bool": { + "filter": [ + { + "range": { + "@timestamp": { + "gte": "2023-11-20", + "lt": "2023-11-21", + "format": "yyyy-MM-dd" + } + } + } + ] + } + } + } + - match: { Size: 47} + +--- +"Test flamegraph from test-events": - do: profiling.flamegraph: body: > { "sample_size": 20000, + "indices": "test-events", + "stacktrace_ids_field": "events", "requested_duration": 86400, "query": { "bool": { From da7bed3584bac69113778654e6ee0f82bdb4d993 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Fri, 2 Feb 2024 16:16:42 +0100 Subject: [PATCH 002/106] [ML] Fix handling of `ml.config_version` node attribute (#105066) --- docs/changelog/105066.yaml | 5 ++ .../xpack/core/ml/MlConfigVersion.java | 2 +- .../xpack/core/ml/MlConfigVersionTests.java | 46 +++++++++++++++++++ .../ml/integration/MlPluginDisabledIT.java | 23 ++++++++-- .../xpack/ml/MachineLearning.java | 19 +++++--- 5 files changed, 84 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/105066.yaml diff --git a/docs/changelog/105066.yaml b/docs/changelog/105066.yaml new file mode 100644 index 0000000000000..95757a9edaf81 --- /dev/null +++ b/docs/changelog/105066.yaml @@ -0,0 +1,5 @@ +pr: 105066 +summary: Fix handling of `ml.config_version` node attribute for nodes with machine learning disabled +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java index ffaa8489929ff..1b365bd96d834 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java @@ -289,7 +289,7 @@ public static Tuple getMinMaxMlConfigVersion(D if (mlConfigVersion.after(maxMlConfigVersion)) { maxMlConfigVersion = mlConfigVersion; } - } catch (IllegalArgumentException e) { + } catch (IllegalStateException e) { // This means we encountered a node that is after 8.10.0 but has the ML plugin disabled - ignore it } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlConfigVersionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlConfigVersionTests.java index f97d9e1f21d07..34428c303a076 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlConfigVersionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlConfigVersionTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.node.VersionInformation; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.index.IndexVersion; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.utils.MlConfigVersionUtils; import org.hamcrest.Matchers; @@ -149,6 +150,51 @@ public void testGetMinMaxMlConfigVersion() { assertEquals(MlConfigVersion.V_10, MlConfigVersion.getMaxMlConfigVersion(nodes)); } + public void testGetMinMaxMlConfigVersionWhenMlConfigVersionAttrIsMissing() { + Map nodeAttr1 = Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.V_7_1_0.toString()); + Map nodeAttr2 = Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.V_8_2_0.toString()); + Map nodeAttr3 = Map.of(); + DiscoveryNodes nodes = DiscoveryNodes.builder() + .add( + DiscoveryNodeUtils.builder("_node_id1") + .name("_node_name1") + .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300)) + .attributes(nodeAttr1) + .roles(ROLES_WITH_ML) + .version(VersionInformation.inferVersions(Version.fromString("7.2.0"))) + .build() + ) + .add( + DiscoveryNodeUtils.builder("_node_id2") + .name("_node_name2") + .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9301)) + .attributes(nodeAttr2) + .roles(ROLES_WITH_ML) + .version(VersionInformation.inferVersions(Version.fromString("7.1.0"))) + .build() + ) + .add( + DiscoveryNodeUtils.builder("_node_id3") + .name("_node_name3") + .address(new TransportAddress(InetAddress.getLoopbackAddress(), 9302)) + .attributes(nodeAttr3) + .roles(ROLES_WITH_ML) + .version( + new VersionInformation( + Version.V_8_11_0, + IndexVersion.getMinimumCompatibleIndexVersion(Version.V_8_11_0.id), + IndexVersion.fromId(Version.V_8_11_0.id) + ) + ) + .build() + ) + .build(); + + assertEquals(MlConfigVersion.V_7_1_0, MlConfigVersion.getMinMlConfigVersion(nodes)); + // _node_name3 is ignored + assertEquals(MlConfigVersion.V_8_2_0, MlConfigVersion.getMaxMlConfigVersion(nodes)); + } + public void testGetMlConfigVersionForNode() { DiscoveryNode node = DiscoveryNodeUtils.builder("_node_id4") .name("_node_name4") diff --git a/x-pack/plugin/ml/qa/disabled/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java b/x-pack/plugin/ml/qa/disabled/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java index 25ea9c5d2b27e..a518e0d496868 100644 --- a/x-pack/plugin/ml/qa/disabled/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java +++ b/x-pack/plugin/ml/qa/disabled/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlPluginDisabledIT.java @@ -10,14 +10,19 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.Map; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; public class MlPluginDisabledIT extends ESRestTestCase { @@ -71,7 +76,19 @@ public void testActionsFail() throws Exception { public void testMlFeatureReset() throws IOException { Request request = new Request("POST", "/_features/_reset"); - Response response = client().performRequest(request); - 
assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); + assertOK(client().performRequest(request)); + } + + @SuppressWarnings("unchecked") + public void testAllNodesHaveMlConfigVersionAttribute() throws IOException { + Request request = new Request("GET", "/_nodes"); + Response response = assertOK(client().performRequest(request)); + var nodesMap = (Map) entityAsMap(response).get("nodes"); + assertThat(nodesMap, is(aMapWithSize(greaterThanOrEqualTo(1)))); + for (var nodeObj : nodesMap.values()) { + var nodeMap = (Map) nodeObj; + // We do not expect any specific version. The only important assertion is that the attribute exists. + assertThat(XContentMapValues.extractValue(nodeMap, "attributes", "ml.config_version"), is(notNullValue())); + } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 7c0004a7532e8..6916a04084285 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -829,13 +829,19 @@ public Settings additionalSettings() { String allocatedProcessorsAttrName = "node.attr." + ALLOCATED_PROCESSORS_NODE_ATTR; String mlConfigVersionAttrName = "node.attr." + ML_CONFIG_VERSION_NODE_ATTR; - if (enabled == false) { - disallowMlNodeAttributes(maxOpenJobsPerNodeNodeAttrName, machineMemoryAttrName, jvmSizeAttrName, mlConfigVersionAttrName); - return Settings.EMPTY; - } - Settings.Builder additionalSettings = Settings.builder(); - if (DiscoveryNode.hasRole(settings, DiscoveryNodeRole.ML_ROLE)) { + + // The ML config version is needed for two related reasons even if ML is currently disabled on the node: + // 1. If ML is in use then decisions about minimum node versions need to include this node, and not + // having it available can cause exceptions during cluster state processing + // 2. It could be argued that reason 1 could be fixed by completely ignoring the node, however, + // then there would be a risk that ML is later enabled on an old node that was ignored, and + // some new ML feature that's been used is then incompatible with it + // The only safe approach is to consider which ML code _all_ nodes in the cluster are running, regardless + // of whether they currently have ML enabled. + addMlNodeAttribute(additionalSettings, mlConfigVersionAttrName, MlConfigVersion.CURRENT.toString()); + + if (enabled && DiscoveryNode.hasRole(settings, DiscoveryNodeRole.ML_ROLE)) { addMlNodeAttribute( additionalSettings, machineMemoryAttrName, @@ -859,7 +865,6 @@ public Settings additionalSettings() { allocatedProcessorsAttrName ); } - addMlNodeAttribute(additionalSettings, mlConfigVersionAttrName, MlConfigVersion.CURRENT.toString()); return additionalSettings.build(); } From 8b7c777b5407c6efdd91c3a1df4d258ed4b28903 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Fri, 2 Feb 2024 17:37:14 +0100 Subject: [PATCH 003/106] Validate settings before reloading JWT shared secret (#105070) This PR adds missing validation before reloading JWT shared secret settings. The shared secret setting must always be configured when the client authentication type is `shared_secret` and omitted when it's `none`. 
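Put differently, reloading must uphold the same invariant that is checked when the realm is constructed: a shared secret is present if and only if the client authentication type requires one. A minimal standalone sketch of that rule (hypothetical names and error messages, not the actual JwtUtil.validateClientAuthenticationSettings signature):

    enum ClientAuthenticationType { SHARED_SECRET, NONE }

    final class SharedSecretValidation {
        // Throws if the (type, secret) combination is inconsistent; the two branches
        // mirror the two error cases asserted in JwtRealmSingleNodeTests below.
        static void validate(ClientAuthenticationType type, String sharedSecret) {
            boolean secretPresent = sharedSecret != null && sharedSecret.isEmpty() == false;
            if (type == ClientAuthenticationType.SHARED_SECRET && secretPresent == false) {
                throw new IllegalStateException("shared_secret is required when client_authentication.type is [shared_secret]");
            }
            if (type == ClientAuthenticationType.NONE && secretPresent) {
                throw new IllegalStateException("shared_secret is not supported when client_authentication.type is [none]");
            }
        }
    }

Running this check before rotating the secret means an invalid keystore update is rejected as a whole, rather than leaving the realm half-updated and failing authentication later.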
--- docs/changelog/105070.yaml | 5 + .../xpack/security/authc/jwt/JwtRestIT.java | 13 +- .../authc/jwt/JwtRealmSingleNodeTests.java | 119 +++++++++++++++++- .../xpack/security/authc/jwt/JwtRealm.java | 17 ++- 4 files changed, 143 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/105070.yaml diff --git a/docs/changelog/105070.yaml b/docs/changelog/105070.yaml new file mode 100644 index 0000000000000..ff4c115e21eea --- /dev/null +++ b/docs/changelog/105070.yaml @@ -0,0 +1,5 @@ +pr: 105070 +summary: Validate settings before reloading JWT shared secret +area: Authentication +type: bug +issues: [] diff --git a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java index 52d87c2e32c87..aef0ec95372cf 100644 --- a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java +++ b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java @@ -542,15 +542,12 @@ public void testReloadClientSecret() throws Exception { // secret updated, so authentication succeeds getSecurityClient(buildAndSignJwtForRealm2(principal), Optional.of(newValidSharedSecret)).authenticate(); - // removing setting also works and leads to authentication failure + // removing setting should not work since it can + // lead to inconsistency in realm's configuration + // and eventual authentication failures writeSettingToKeystoreThenReload("xpack.security.authc.realms.jwt.jwt2.client_authentication.shared_secret", null); - assertThat( - expectThrows( - ResponseException.class, - () -> getSecurityClient(buildAndSignJwtForRealm2(principal), Optional.of(newValidSharedSecret)).authenticate() - ).getResponse(), - hasStatusCode(RestStatus.UNAUTHORIZED) - ); + getSecurityClient(buildAndSignJwtForRealm2(principal), Optional.of(newValidSharedSecret)).authenticate(); + } finally { // Restore setting for other tests writeSettingToKeystoreThenReload( diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRealmSingleNodeTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRealmSingleNodeTests.java index c9b43afd4322d..ac033ba75798a 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRealmSingleNodeTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRealmSingleNodeTests.java @@ -16,6 +16,7 @@ import com.nimbusds.jwt.SignedJWT; import org.apache.http.HttpEntity; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -61,10 +62,12 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.WAIT_UNTIL; +import static org.elasticsearch.xpack.core.security.authc.jwt.JwtRealmSettings.CLIENT_AUTHENTICATION_TYPE; import static org.elasticsearch.xpack.core.security.authc.jwt.JwtRealmSettings.CLIENT_AUTH_SHARED_SECRET_ROTATION_GRACE_PERIOD; import static 
org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; @@ -124,7 +127,20 @@ protected Settings nodeSettings() { .put("xpack.security.authc.realms.jwt.jwt2.claims.groups", "groups") .put("xpack.security.authc.realms.jwt.jwt2.client_authentication.type", "shared_secret") .put("xpack.security.authc.realms.jwt.jwt2.client_authentication.rotation_grace_period", "0s") - .putList("xpack.security.authc.realms.jwt.jwt2.allowed_signature_algorithms", "HS256", "HS384"); + .putList("xpack.security.authc.realms.jwt.jwt2.allowed_signature_algorithms", "HS256", "HS384") + // 4th JWT realm + .put("xpack.security.authc.realms.jwt.jwt3.order", 40) + .put("xpack.security.authc.realms.jwt.jwt3.token_type", "id_token") + .put("xpack.security.authc.realms.jwt.jwt3.allowed_issuer", "my-issuer-04") + .put("xpack.security.authc.realms.jwt.jwt3.allowed_subjects", "user-04") + .put("xpack.security.authc.realms.jwt.jwt3.allowed_audiences", "es-04") + .put("xpack.security.authc.realms.jwt.jwt3.claims.principal", "sub") + .put("xpack.security.authc.realms.jwt.jwt3.claims.groups", "groups") + .put("xpack.security.authc.realms.jwt.jwt3.client_authentication.type", "NONE") + .put( + "xpack.security.authc.realms.jwt.jwt3.pkc_jwkset_path", + getDataPath("/org/elasticsearch/xpack/security/authc/apikey/rsa-public-jwkset.json") + ); SecuritySettingsSource.addSecureSettings(builder, secureSettings -> { secureSettings.setString("xpack.security.authc.realms.jwt.jwt0.hmac_key", jwtHmacKey); @@ -491,6 +507,103 @@ public void testClientSecretRotation() throws Exception { } } + public void testValidationDuringReloadingClientSecrets() { + final Map realmsByName = getJwtRealms().stream().collect(Collectors.toMap(Realm::name, r -> r)); + final Set realmsWithSharedSecret = Set.of(realmsByName.get("jwt0"), realmsByName.get("jwt1"), realmsByName.get("jwt2")); + final JwtRealm realmWithoutSharedSecret = realmsByName.get("jwt3"); + + // Sanity check all client_authentication.type settings. + for (JwtRealm realm : realmsWithSharedSecret) { + assertThat(getClientAuthenticationType(realm), equalTo(JwtRealmSettings.ClientAuthenticationType.SHARED_SECRET)); + } + assertThat(getClientAuthenticationType(realmWithoutSharedSecret), equalTo(JwtRealmSettings.ClientAuthenticationType.NONE)); + + // Randomly chose one JWT realm which requires shared secret and omit it. + final MockSecureSettings newSecureSettings = new MockSecureSettings(); + final JwtRealm chosenRealmToRemoveSharedSecret = randomFrom(realmsWithSharedSecret); + for (JwtRealm realm : realmsWithSharedSecret) { + if (realm != chosenRealmToRemoveSharedSecret) { + newSecureSettings.setString( + "xpack.security.authc.realms.jwt." + realm.name() + ".client_authentication.shared_secret", + realm.name() + "_shared_secret" + ); + } + } + + // Reload settings and check if validation prevented updating for randomly chosen realm. 
+ final PluginsService plugins = getInstanceFromNode(PluginsService.class); + final LocalStateSecurity localStateSecurity = plugins.filterPlugins(LocalStateSecurity.class).findFirst().get(); + final Security securityPlugin = localStateSecurity.plugins() + .stream() + .filter(p -> p instanceof Security) + .map(Security.class::cast) + .findFirst() + .orElseThrow(() -> new IllegalStateException("Security plugin not found!")); + + Settings.Builder newSettingsBuilder = Settings.builder().setSecureSettings(newSecureSettings); + { + var e = expectThrows(ElasticsearchException.class, () -> securityPlugin.reload(newSettingsBuilder.build())); + assertThat(e.getMessage(), containsString("secure settings reload failed for one or more security component")); + + var suppressedExceptions = e.getSuppressed(); + assertThat(suppressedExceptions.length, equalTo(1)); + assertThat(suppressedExceptions[0].getMessage(), containsString("secure settings reload failed for one or more realms")); + + var realmSuppressedExceptions = suppressedExceptions[0].getSuppressed(); + assertThat(realmSuppressedExceptions.length, equalTo(1)); + assertThat( + realmSuppressedExceptions[0].getMessage(), + containsString( + "Missing setting for [xpack.security.authc.realms.jwt." + + chosenRealmToRemoveSharedSecret.name() + + ".client_authentication.shared_secret]. It is required when setting [xpack.security.authc.realms.jwt." + + chosenRealmToRemoveSharedSecret.name() + + ".client_authentication.type] is [" + + JwtRealmSettings.ClientAuthenticationType.SHARED_SECRET.value() + + "]" + ) + ); + } + + // Add missing required shared secret setting in order + // to avoid raising an exception for realm which has + // client_authentication.type set to shared_secret. + newSecureSettings.setString( + "xpack.security.authc.realms.jwt." + chosenRealmToRemoveSharedSecret.name() + ".client_authentication.shared_secret", + chosenRealmToRemoveSharedSecret.name() + "_shared_secret" + ); + // Add shared secret for realm which does not require it, + // because it has client_authentication.type set to NONE. + newSecureSettings.setString( + "xpack.security.authc.realms.jwt." + realmWithoutSharedSecret.name() + ".client_authentication.shared_secret", + realmWithoutSharedSecret.name() + "_shared_secret" + ); + + { + var e = expectThrows(ElasticsearchException.class, () -> securityPlugin.reload(newSettingsBuilder.build())); + assertThat(e.getMessage(), containsString("secure settings reload failed for one or more security component")); + + var suppressedExceptions = e.getSuppressed(); + assertThat(suppressedExceptions.length, equalTo(1)); + assertThat(suppressedExceptions[0].getMessage(), containsString("secure settings reload failed for one or more realms")); + + var realmSuppressedExceptions = suppressedExceptions[0].getSuppressed(); + assertThat(realmSuppressedExceptions.length, equalTo(1)); + assertThat( + realmSuppressedExceptions[0].getMessage(), + containsString( + "Setting [xpack.security.authc.realms.jwt." + + realmWithoutSharedSecret.name() + + ".client_authentication.shared_secret] is not supported, because setting [xpack.security.authc.realms.jwt." 
+ + realmWithoutSharedSecret.name() + + ".client_authentication.type] is [" + + JwtRealmSettings.ClientAuthenticationType.NONE.value() + + "]" + ) + ); + } + } + private SignedJWT getSignedJWT(JWTClaimsSet claimsSet, byte[] hmacKeyBytes) throws Exception { JWSHeader jwtHeader = new JWSHeader.Builder(JWSAlgorithm.HS256).build(); OctetSequenceKey.Builder jwt0signer = new OctetSequenceKey.Builder(hmacKeyBytes); @@ -517,6 +630,10 @@ private TimeValue getGracePeriod(JwtRealm realm) { return realm.getConfig().getConcreteSetting(CLIENT_AUTH_SHARED_SECRET_ROTATION_GRACE_PERIOD).get(realm.getConfig().settings()); } + private JwtRealmSettings.ClientAuthenticationType getClientAuthenticationType(JwtRealm realm) { + return realm.getConfig().getConcreteSetting(CLIENT_AUTHENTICATION_TYPE).get(realm.getConfig().settings()); + } + private void assertJwtToken(JwtAuthenticationToken token, String tokenPrincipal, String sharedSecret, SignedJWT signedJWT) throws ParseException { assertThat(token.principal(), equalTo(tokenPrincipal)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtRealm.java index bef342d330f34..a541eef2f07f6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/jwt/JwtRealm.java @@ -404,8 +404,21 @@ public void usageStats(final ActionListener> listener) { @Override public void reload(Settings settings) { - var clientSecret = CLIENT_AUTHENTICATION_SHARED_SECRET.getConcreteSettingForNamespace(this.realmRef().getName()).get(settings); - this.clientAuthenticationSharedSecret.rotate(clientSecret, config.getSetting(CLIENT_AUTH_SHARED_SECRET_ROTATION_GRACE_PERIOD)); + final SecureString newClientSharedSecret = CLIENT_AUTHENTICATION_SHARED_SECRET.getConcreteSettingForNamespace( + this.realmRef().getName() + ).get(settings); + + JwtUtil.validateClientAuthenticationSettings( + RealmSettings.getFullSettingKey(this.config, JwtRealmSettings.CLIENT_AUTHENTICATION_TYPE), + this.clientAuthenticationType, + RealmSettings.getFullSettingKey(this.config, JwtRealmSettings.CLIENT_AUTHENTICATION_SHARED_SECRET), + new RotatableSecret(newClientSharedSecret) + ); + + this.clientAuthenticationSharedSecret.rotate( + newClientSharedSecret, + config.getSetting(CLIENT_AUTH_SHARED_SECRET_ROTATION_GRACE_PERIOD) + ); } /** From 54088839b47e2bfffa31c4bee309136cd4b23e48 Mon Sep 17 00:00:00 2001 From: Moritz Mack Date: Fri, 2 Feb 2024 18:13:00 +0100 Subject: [PATCH 004/106] Do not enable APM agent 'instrument', it's not required for manual tracing. 
(#105055) --- docs/changelog/105055.yaml | 5 +++++ .../telemetry/apm/internal/APMAgentSettings.java | 5 ++--- .../telemetry/apm/internal/APMAgentSettingsTests.java | 8 -------- 3 files changed, 7 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/105055.yaml diff --git a/docs/changelog/105055.yaml b/docs/changelog/105055.yaml new file mode 100644 index 0000000000000..0db70a6b9e558 --- /dev/null +++ b/docs/changelog/105055.yaml @@ -0,0 +1,5 @@ +pr: 105055 +summary: "Do not enable APM agent 'instrument', it's not required for manual tracing" +area: Infra/Core +type: bug +issues: [] diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java index 88359d32a628c..0bbaca00d1e2e 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java @@ -44,7 +44,6 @@ public void addClusterSettingsListeners(ClusterService clusterService, APMTeleme clusterSettings.addSettingsUpdateConsumer(TELEMETRY_TRACING_ENABLED_SETTING, enabled -> { apmTracer.setEnabled(enabled); - this.setAgentSetting("instrument", Boolean.toString(enabled)); // The agent records data other than spans, e.g. JVM metrics, so we toggle this setting in order to // minimise its impact to a running Elasticsearch. boolean recording = enabled || clusterSettings.get(TELEMETRY_METRICS_ENABLED_SETTING); @@ -73,7 +72,6 @@ public void initAgentSystemProperties(Settings settings) { boolean metrics = TELEMETRY_METRICS_ENABLED_SETTING.get(settings); this.setAgentSetting("recording", Boolean.toString(tracing || metrics)); - this.setAgentSetting("instrument", Boolean.toString(tracing)); // Apply values from the settings in the cluster state APM_AGENT_SETTINGS.getAsMap(settings).forEach(this::setAgentSetting); } @@ -120,7 +118,8 @@ public void setAgentSetting(String key, String value) { // Core: // forbid 'enabled', must remain enabled to dynamically enable tracing / metrics - // forbid 'recording' / 'instrument', controlled by 'telemetry.metrics.enabled' / 'telemetry.tracing.enabled' + // forbid 'recording', controlled by 'telemetry.metrics.enabled' / 'telemetry.tracing.enabled' + // forbid 'instrument', automatic instrumentation can cause issues "service_name", "service_node_name", // forbid 'service_version', forced by APMJvmOptions diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java index d7ae93aded3de..f075f4fc39cfd 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettingsTests.java @@ -60,13 +60,11 @@ public void testEnableTracing() { apmAgentSettings.initAgentSystemProperties(update); verify(apmAgentSettings).setAgentSetting("recording", "true"); - verify(apmAgentSettings).setAgentSetting("instrument", "true"); clearInvocations(apmAgentSettings); Settings initial = Settings.builder().put(update).put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), false).build(); triggerUpdateConsumer(initial, update); verify(apmAgentSettings).setAgentSetting("recording", "true"); - verify(apmAgentSettings).setAgentSetting("instrument", "true"); verify(apmTelemetryProvider.getTracer()).setEnabled(true); } } 
@@ -76,7 +74,6 @@ public void testEnableTracingUsingLegacySetting() { apmAgentSettings.initAgentSystemProperties(settings); verify(apmAgentSettings).setAgentSetting("recording", "true"); - verify(apmAgentSettings).setAgentSetting("instrument", "true"); } public void testEnableMetrics() { @@ -90,7 +87,6 @@ public void testEnableMetrics() { apmAgentSettings.initAgentSystemProperties(update); verify(apmAgentSettings).setAgentSetting("recording", "true"); - verify(apmAgentSettings).setAgentSetting("instrument", Boolean.toString(tracingEnabled)); clearInvocations(apmAgentSettings); Settings initial = Settings.builder().put(update).put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), false).build(); @@ -114,13 +110,11 @@ public void testDisableTracing() { apmAgentSettings.initAgentSystemProperties(update); verify(apmAgentSettings).setAgentSetting("recording", Boolean.toString(metricsEnabled)); - verify(apmAgentSettings).setAgentSetting("instrument", "false"); clearInvocations(apmAgentSettings); Settings initial = Settings.builder().put(update).put(TELEMETRY_TRACING_ENABLED_SETTING.getKey(), true).build(); triggerUpdateConsumer(initial, update); verify(apmAgentSettings).setAgentSetting("recording", Boolean.toString(metricsEnabled)); - verify(apmAgentSettings).setAgentSetting("instrument", "false"); verify(apmTelemetryProvider.getTracer()).setEnabled(false); } } @@ -130,7 +124,6 @@ public void testDisableTracingUsingLegacySetting() { apmAgentSettings.initAgentSystemProperties(settings); verify(apmAgentSettings).setAgentSetting("recording", "false"); - verify(apmAgentSettings).setAgentSetting("instrument", "false"); } public void testDisableMetrics() { @@ -144,7 +137,6 @@ public void testDisableMetrics() { apmAgentSettings.initAgentSystemProperties(update); verify(apmAgentSettings).setAgentSetting("recording", Boolean.toString(tracingEnabled)); - verify(apmAgentSettings).setAgentSetting("instrument", Boolean.toString(tracingEnabled)); clearInvocations(apmAgentSettings); Settings initial = Settings.builder().put(update).put(TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build(); From 7d9253ed35bde1493bb905015210dec412324066 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Fri, 2 Feb 2024 20:50:26 +0100 Subject: [PATCH 005/106] Avoid creating map allocator when writing empty maps to StreamOutput (#105071) It's in the title. We already have the size here and allocating the iterator isn't free. In fact it's 10G of allocations during http_logs indexing that we can avoid with a simple condition. 
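The shape of the fix, as a standalone sketch (hypothetical functional-interface writers, not the actual StreamOutput API): write the size vInt unconditionally, and only touch the entry set when there is something to iterate.

    import java.util.Map;
    import java.util.function.Consumer;
    import java.util.function.IntConsumer;

    final class MapWriting {
        static <K, V> void writeMap(Map<K, V> map, IntConsumer sizeWriter, Consumer<K> keyWriter, Consumer<V> valueWriter) {
            int size = map.size();
            sizeWriter.accept(size); // the size is always written, even for empty maps
            if (size > 0) { // skip the entrySet() iterator allocation when there is nothing to write
                for (Map.Entry<K, V> entry : map.entrySet()) {
                    keyWriter.accept(entry.getKey());
                    valueWriter.accept(entry.getValue());
                }
            }
        }
    }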
--- .../elasticsearch/common/io/stream/StreamOutput.java | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index a0b62bdabc08b..a3350c4526a91 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -596,10 +596,13 @@ public final void writeMap(final Map< * @param valueWriter The value writer */ public final void writeMap(final Map map, final Writer keyWriter, final Writer valueWriter) throws IOException { - writeVInt(map.size()); - for (final Map.Entry entry : map.entrySet()) { - keyWriter.write(this, entry.getKey()); - valueWriter.write(this, entry.getValue()); + int size = map.size(); + writeVInt(size); + if (size > 0) { + for (final Map.Entry entry : map.entrySet()) { + keyWriter.write(this, entry.getKey()); + valueWriter.write(this, entry.getValue()); + } } } From 64a790f8fec85b85025a4848cf77a82bc61b5d05 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Fri, 2 Feb 2024 15:53:44 -0600 Subject: [PATCH 006/106] Modifying ingest request builders (#104636) This changes all of our ingest-related builders other than BulkRequestBuilder (which was already changed in #104927) to inherit from ActionRequestLazyBuilder (added in #104927) rather than ActionRequestBuilder. This means that the requests will not be created until the builder's request() method is called, making upcoming ref counting work much more feasible. --- docs/changelog/104636.yaml | 5 + .../action/bulk/BulkRequestBuilder.java | 13 +- .../action/delete/DeleteRequestBuilder.java | 76 ++++- .../action/index/IndexRequestBuilder.java | 200 +++++++++-- .../ReplicationRequestBuilder.java | 53 ++- .../InstanceShardOperationRequestBuilder.java | 45 ++- .../action/update/UpdateRequestBuilder.java | 313 +++++++++++++++--- .../AbstractBulkByScrollRequestBuilder.java | 87 ++++- ...stractBulkIndexByScrollRequestBuilder.java | 16 +- .../index/reindex/DeleteByQueryRequest.java | 2 +- .../reindex/DeleteByQueryRequestBuilder.java | 34 +- .../index/reindex/ReindexRequest.java | 2 +- .../index/reindex/ReindexRequestBuilder.java | 42 ++- .../index/reindex/UpdateByQueryRequest.java | 2 +- .../reindex/UpdateByQueryRequestBuilder.java | 39 ++- .../delete/DeleteRequestBuilderTests.java | 28 ++ .../index/IndexRequestBuilderTests.java | 17 + .../update/UpdateRequestBuilderTests.java | 39 +++ .../elasticsearch/test/ESIntegTestCase.java | 2 +- 19 files changed, 887 insertions(+), 128 deletions(-) create mode 100644 docs/changelog/104636.yaml create mode 100644 server/src/test/java/org/elasticsearch/action/delete/DeleteRequestBuilderTests.java create mode 100644 server/src/test/java/org/elasticsearch/action/update/UpdateRequestBuilderTests.java diff --git a/docs/changelog/104636.yaml b/docs/changelog/104636.yaml new file mode 100644 index 0000000000000..d74682f2eba18 --- /dev/null +++ b/docs/changelog/104636.yaml @@ -0,0 +1,5 @@ +pr: 104636 +summary: Modifying request builders +area: Ingest Node +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java index 16e5430063650..0d993d797b287 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java @@ -8,11 +8,9 @@ package org.elasticsearch.action.bulk; -import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestLazyBuilder; -import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.RequestBuilder; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteRequestBuilder; import org.elasticsearch.action.index.IndexRequest; @@ -46,7 +44,8 @@ public class BulkRequestBuilder extends ActionRequestLazyBuilder> requests = new ArrayList<>(); private final List framedData = new ArrayList<>(); - private final List> requestBuilders = new ArrayList<>(); + private final List, ? extends DocWriteResponse>> requestBuilders = + new ArrayList<>(); private ActiveShardCount waitForActiveShards; private TimeValue timeout; private String timeoutString; @@ -204,9 +203,9 @@ public BulkRequestBuilder setRefreshPolicy(String refreshPolicy) { public BulkRequest request() { validate(); BulkRequest request = new BulkRequest(globalIndex); - for (RequestBuilder requestBuilder : requestBuilders) { - ActionRequest childRequest = requestBuilder.request(); - request.add((DocWriteRequest) childRequest); + for (ActionRequestLazyBuilder, ? extends DocWriteResponse> requestBuilder : requestBuilders) { + DocWriteRequest childRequest = requestBuilder.request(); + request.add(childRequest); } for (DocWriteRequest childRequest : requests) { request.add(childRequest); diff --git a/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java index f99bea1a64821..dac5421bdeee0 100644 --- a/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.delete; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.action.support.replication.ReplicationRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; @@ -21,15 +22,25 @@ public class DeleteRequestBuilder extends ReplicationRequestBuilder { + private String id; + private String routing; + private Long version; + private VersionType versionType; + private Long seqNo; + private Long term; + private WriteRequest.RefreshPolicy refreshPolicy; + private String refreshPolicyString; + public DeleteRequestBuilder(ElasticsearchClient client, @Nullable String index) { - super(client, TransportDeleteAction.TYPE, new DeleteRequest(index)); + super(client, TransportDeleteAction.TYPE); + setIndex(index); } /** * Sets the id of the document to delete. */ public DeleteRequestBuilder setId(String id) { - request.id(id); + this.id = id; return this; } @@ -38,7 +49,7 @@ public DeleteRequestBuilder setId(String id) { * and not the id. */ public DeleteRequestBuilder setRouting(String routing) { - request.routing(routing); + this.routing = routing; return this; } @@ -47,7 +58,7 @@ public DeleteRequestBuilder setRouting(String routing) { * version exists and no changes happened on the doc since then. 
*/ public DeleteRequestBuilder setVersion(long version) { - request.version(version); + this.version = version; return this; } @@ -55,7 +66,7 @@ public DeleteRequestBuilder setVersion(long version) { * Sets the type of versioning to use. Defaults to {@link VersionType#INTERNAL}. */ public DeleteRequestBuilder setVersionType(VersionType versionType) { - request.versionType(versionType); + this.versionType = versionType; return this; } @@ -67,7 +78,7 @@ public DeleteRequestBuilder setVersionType(VersionType versionType) { * {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown. */ public DeleteRequestBuilder setIfSeqNo(long seqNo) { - request.setIfSeqNo(seqNo); + this.seqNo = seqNo; return this; } @@ -79,8 +90,59 @@ public DeleteRequestBuilder setIfSeqNo(long seqNo) { * {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown. */ public DeleteRequestBuilder setIfPrimaryTerm(long term) { - request.setIfPrimaryTerm(term); + this.term = term; + return this; + } + + @Override + public DeleteRequestBuilder setRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) { + this.refreshPolicy = refreshPolicy; + return this; + } + + @Override + public DeleteRequestBuilder setRefreshPolicy(String refreshPolicy) { + this.refreshPolicyString = refreshPolicy; return this; } + @Override + public DeleteRequest request() { + validate(); + DeleteRequest request = new DeleteRequest(); + super.apply(request); + if (id != null) { + request.id(id); + } + if (routing != null) { + request.routing(routing); + } + if (version != null) { + request.version(version); + } + if (versionType != null) { + request.versionType(versionType); + } + if (seqNo != null) { + request.setIfSeqNo(seqNo); + } + if (term != null) { + request.setIfPrimaryTerm(term); + } + if (refreshPolicy != null) { + request.setRefreshPolicy(refreshPolicy); + } + if (refreshPolicyString != null) { + request.setRefreshPolicy(refreshPolicyString); + } + return request; + } + + @Override + protected void validate() throws IllegalStateException { + super.validate(); + if (refreshPolicy != null && refreshPolicyString != null) { + throw new IllegalStateException("Must use only one setRefreshPolicy method"); + } + } } diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java index b8faf39514cbe..3b6d07d200e29 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.action.support.replication.ReplicationRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; @@ -27,13 +28,44 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder implements WriteRequestBuilder { + private String id = null; + /* + * The following variables hold information about the source of the request. Only one of sourceMap, sourceArray, sourceString, + * sourceBytesReference, or sourceBytes can actually be used. When request() is called it makes sure that only one is set. 
+ */ + private Map sourceMap; + private Object[] sourceArray; + private XContentBuilder sourceXContentBuilder; + private String sourceString; + private BytesReference sourceBytesReference; + private byte[] sourceBytes; + // Optionally used with sourceBytes: + private Integer sourceOffset; + // Optionally used with sourceBytes: + private Integer sourceLength; + // Optionally used with sourceMap, sourceArray, sourceString, sourceBytesReference, or sourceBytes: + private XContentType sourceContentType; + + private String pipeline; + private Boolean requireAlias; + private Boolean requireDataStream; + private String routing; + private WriteRequest.RefreshPolicy refreshPolicy; + private String refreshPolicyString; + private Long ifSeqNo; + private Long ifPrimaryTerm; + private DocWriteRequest.OpType opType; + private Boolean create; + private Long version; + private VersionType versionType; public IndexRequestBuilder(ElasticsearchClient client) { - super(client, TransportIndexAction.TYPE, new IndexRequest()); + this(client, null); } public IndexRequestBuilder(ElasticsearchClient client, @Nullable String index) { - super(client, TransportIndexAction.TYPE, new IndexRequest(index)); + super(client, TransportIndexAction.TYPE); + setIndex(index); } /** @@ -41,7 +73,7 @@ public IndexRequestBuilder(ElasticsearchClient client, @Nullable String index) { * generated. */ public IndexRequestBuilder setId(String id) { - request.id(id); + this.id = id; return this; } @@ -50,7 +82,7 @@ public IndexRequestBuilder setId(String id) { * and not the id. */ public IndexRequestBuilder setRouting(String routing) { - request.routing(routing); + this.routing = routing; return this; } @@ -58,7 +90,8 @@ public IndexRequestBuilder setRouting(String routing) { * Sets the source. */ public IndexRequestBuilder setSource(BytesReference source, XContentType xContentType) { - request.source(source, xContentType); + this.sourceBytesReference = source; + this.sourceContentType = xContentType; return this; } @@ -68,7 +101,7 @@ public IndexRequestBuilder setSource(BytesReference source, XContentType xConten * @param source The map to index */ public IndexRequestBuilder setSource(Map source) { - request.source(source); + this.sourceMap = source; return this; } @@ -78,7 +111,8 @@ public IndexRequestBuilder setSource(Map source) { * @param source The map to index */ public IndexRequestBuilder setSource(Map source, XContentType contentType) { - request.source(source, contentType); + this.sourceMap = source; + this.sourceContentType = contentType; return this; } @@ -89,7 +123,8 @@ public IndexRequestBuilder setSource(Map source, XContentType content * or using the {@link #setSource(byte[], XContentType)}. */ public IndexRequestBuilder setSource(String source, XContentType xContentType) { - request.source(source, xContentType); + this.sourceString = source; + this.sourceContentType = xContentType; return this; } @@ -97,7 +132,7 @@ public IndexRequestBuilder setSource(String source, XContentType xContentType) { * Sets the content source to index. */ public IndexRequestBuilder setSource(XContentBuilder sourceBuilder) { - request.source(sourceBuilder); + this.sourceXContentBuilder = sourceBuilder; return this; } @@ -105,7 +140,8 @@ public IndexRequestBuilder setSource(XContentBuilder sourceBuilder) { * Sets the document to index in bytes form. 
*/ public IndexRequestBuilder setSource(byte[] source, XContentType xContentType) { - request.source(source, xContentType); + this.sourceBytes = source; + this.sourceContentType = xContentType; return this; } @@ -119,7 +155,10 @@ public IndexRequestBuilder setSource(byte[] source, XContentType xContentType) { * @param xContentType The type/format of the source */ public IndexRequestBuilder setSource(byte[] source, int offset, int length, XContentType xContentType) { - request.source(source, offset, length, xContentType); + this.sourceBytes = source; + this.sourceOffset = offset; + this.sourceLength = length; + this.sourceContentType = xContentType; return this; } @@ -132,7 +171,10 @@ public IndexRequestBuilder setSource(byte[] source, int offset, int length, XCon *

*/ public IndexRequestBuilder setSource(Object... source) { - request.source(source); + if (source.length % 2 != 0) { + throw new IllegalArgumentException("The number of objects passed must be even but was [" + source.length + "]"); + } + this.sourceArray = source; return this; } @@ -145,7 +187,11 @@ public IndexRequestBuilder setSource(Object... source) { *

*/ public IndexRequestBuilder setSource(XContentType xContentType, Object... source) { - request.source(xContentType, source); + if (source.length % 2 != 0) { + throw new IllegalArgumentException("The number of objects passed must be even but was [" + source.length + "]"); + } + this.sourceArray = source; + this.sourceContentType = xContentType; return this; } @@ -153,7 +199,7 @@ public IndexRequestBuilder setSource(XContentType xContentType, Object... source * Sets the type of operation to perform. */ public IndexRequestBuilder setOpType(DocWriteRequest.OpType opType) { - request.opType(opType); + this.opType = opType; return this; } @@ -161,7 +207,7 @@ public IndexRequestBuilder setOpType(DocWriteRequest.OpType opType) { * Set to {@code true} to force this index to use {@link org.elasticsearch.action.index.IndexRequest.OpType#CREATE}. */ public IndexRequestBuilder setCreate(boolean create) { - request.create(create); + this.create = create; return this; } @@ -170,7 +216,7 @@ public IndexRequestBuilder setCreate(boolean create) { * version exists and no changes happened on the doc since then. */ public IndexRequestBuilder setVersion(long version) { - request.version(version); + this.version = version; return this; } @@ -178,7 +224,7 @@ public IndexRequestBuilder setVersion(long version) { * Sets the versioning type. Defaults to {@link VersionType#INTERNAL}. */ public IndexRequestBuilder setVersionType(VersionType versionType) { - request.versionType(versionType); + this.versionType = versionType; return this; } @@ -190,7 +236,7 @@ public IndexRequestBuilder setVersionType(VersionType versionType) { * {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown. */ public IndexRequestBuilder setIfSeqNo(long seqNo) { - request.setIfSeqNo(seqNo); + this.ifSeqNo = seqNo; return this; } @@ -202,7 +248,7 @@ public IndexRequestBuilder setIfSeqNo(long seqNo) { * {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown. 
*/ public IndexRequestBuilder setIfPrimaryTerm(long term) { - request.setIfPrimaryTerm(term); + this.ifPrimaryTerm = term; return this; } @@ -210,7 +256,7 @@ public IndexRequestBuilder setIfPrimaryTerm(long term) { * Sets the ingest pipeline to be executed before indexing the document */ public IndexRequestBuilder setPipeline(String pipeline) { - request.setPipeline(pipeline); + this.pipeline = pipeline; return this; } @@ -218,7 +264,7 @@ public IndexRequestBuilder setPipeline(String pipeline) { * Sets the require_alias flag */ public IndexRequestBuilder setRequireAlias(boolean requireAlias) { - request.setRequireAlias(requireAlias); + this.requireAlias = requireAlias; return this; } @@ -226,7 +272,114 @@ public IndexRequestBuilder setRequireAlias(boolean requireAlias) { * Sets the require_data_stream flag */ public IndexRequestBuilder setRequireDataStream(boolean requireDataStream) { - request.setRequireDataStream(requireDataStream); + this.requireDataStream = requireDataStream; + return this; + } + + public IndexRequestBuilder setRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) { + this.refreshPolicy = refreshPolicy; return this; } + + public IndexRequestBuilder setRefreshPolicy(String refreshPolicy) { + this.refreshPolicyString = refreshPolicy; + return this; + } + + @Override + public IndexRequest request() { + validate(); + IndexRequest request = new IndexRequest(); + super.apply(request); + request.id(id); + if (sourceMap != null && sourceContentType != null) { + request.source(sourceMap, sourceContentType); + } else if (sourceMap != null) { + request.source(sourceMap); + } + if (sourceArray != null && sourceContentType != null) { + request.source(sourceContentType, sourceArray); + } else if (sourceArray != null) { + request.source(sourceArray); + } + if (sourceXContentBuilder != null) { + request.source(sourceXContentBuilder); + } + if (sourceString != null && sourceContentType != null) { + request.source(sourceString, sourceContentType); + } + if (sourceBytesReference != null && sourceContentType != null) { + request.source(sourceBytesReference, sourceContentType); + } + if (sourceBytes != null && sourceContentType != null) { + if (sourceOffset != null && sourceLength != null) { + request.source(sourceBytes, sourceOffset, sourceLength, sourceContentType); + } else { + request.source(sourceBytes, sourceContentType); + } + } + if (pipeline != null) { + request.setPipeline(pipeline); + } + if (routing != null) { + request.routing(routing); + } + if (refreshPolicy != null) { + request.setRefreshPolicy(refreshPolicy); + } + if (refreshPolicyString != null) { + request.setRefreshPolicy(refreshPolicyString); + } + if (ifSeqNo != null) { + request.setIfSeqNo(ifSeqNo); + } + if (ifPrimaryTerm != null) { + request.setIfPrimaryTerm(ifPrimaryTerm); + } + if (requireAlias != null) { + request.setRequireAlias(requireAlias); + } + if (requireDataStream != null) { + request.setRequireDataStream(requireDataStream); + } + if (opType != null) { + request.opType(opType); + } + if (create != null) { + request.create(create); + } + if (version != null) { + request.version(version); + } + if (versionType != null) { + request.versionType(versionType); + } + return request; + } + + @Override + protected void validate() throws IllegalStateException { + super.validate(); + int sourceFieldsSet = countSourceFieldsSet(); + if (sourceFieldsSet > 1) { + throw new IllegalStateException("Only one setSource() method may be called, but " + 
sourceFieldsSet + " have been"); + } + } + + /* + * Returns the number of the source fields that are non-null (ideally this will be 1). + */ + private int countSourceFieldsSet() { + return countNonNullObjects(sourceMap, sourceArray, sourceXContentBuilder, sourceString, sourceBytesReference, sourceBytes); + } + + private int countNonNullObjects(Object... objects) { + int sum = 0; + for (Object object : objects) { + if (object != null) { + sum++; + } + } + return sum; + } } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java index a4d5e07103df3..94935a670afb7 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java @@ -8,7 +8,7 @@ package org.elasticsearch.action.support.replication; -import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestLazyBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActiveShardCount; @@ -18,18 +18,24 @@ public abstract class ReplicationRequestBuilder< Request extends ReplicationRequest, Response extends ActionResponse, - RequestBuilder extends ReplicationRequestBuilder> extends ActionRequestBuilder { + RequestBuilder extends ReplicationRequestBuilder> extends ActionRequestLazyBuilder< + Request, + Response> { + private String index; + private TimeValue timeout; + private String timeoutString; + private ActiveShardCount waitForActiveShards; - protected ReplicationRequestBuilder(ElasticsearchClient client, ActionType action, Request request) { - super(client, action, request); + protected ReplicationRequestBuilder(ElasticsearchClient client, ActionType action) { + super(client, action); } /** * A timeout to wait if the index operation can't be performed immediately. Defaults to {@code 1m}. */ @SuppressWarnings("unchecked") - public final RequestBuilder setTimeout(TimeValue timeout) { - request.timeout(timeout); + public RequestBuilder setTimeout(TimeValue timeout) { + this.timeout = timeout; return (RequestBuilder) this; } @@ -37,24 +43,28 @@ public final RequestBuilder setTimeout(TimeValue timeout) { * A timeout to wait if the index operation can't be performed immediately. Defaults to {@code 1m}. */ @SuppressWarnings("unchecked") - public final RequestBuilder setTimeout(String timeout) { - request.timeout(timeout); + public RequestBuilder setTimeout(String timeout) { + this.timeoutString = timeout; return (RequestBuilder) this; } @SuppressWarnings("unchecked") - public final RequestBuilder setIndex(String index) { - request.index(index); + public RequestBuilder setIndex(String index) { + this.index = index; return (RequestBuilder) this; } + public String getIndex() { + return index; + } + /** * Sets the number of shard copies that must be active before proceeding with the write. * See {@link ReplicationRequest#waitForActiveShards(ActiveShardCount)} for details. 
*/ @SuppressWarnings("unchecked") public RequestBuilder setWaitForActiveShards(ActiveShardCount waitForActiveShards) { - request.waitForActiveShards(waitForActiveShards); + this.waitForActiveShards = waitForActiveShards; return (RequestBuilder) this; } @@ -66,4 +76,25 @@ public RequestBuilder setWaitForActiveShards(ActiveShardCount waitForActiveShard public RequestBuilder setWaitForActiveShards(final int waitForActiveShards) { return setWaitForActiveShards(ActiveShardCount.from(waitForActiveShards)); } + + protected void apply(Request request) { + if (index != null) { + request.index(index); + } + if (timeout != null) { + request.timeout(timeout); + } + if (timeoutString != null) { + request.timeout(timeoutString); + } + if (waitForActiveShards != null) { + request.waitForActiveShards(waitForActiveShards); + } + } + + protected void validate() throws IllegalStateException { + if (timeout != null && timeoutString != null) { + throw new IllegalStateException("Must use only one setTimeout method"); + } + } } diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java index 931f072e1e45e..1678b08aba940 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java @@ -8,7 +8,7 @@ package org.elasticsearch.action.support.single.instance; -import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestLazyBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.client.internal.ElasticsearchClient; @@ -17,26 +17,33 @@ public abstract class InstanceShardOperationRequestBuilder< Request extends InstanceShardOperationRequest, Response extends ActionResponse, - RequestBuilder extends InstanceShardOperationRequestBuilder> extends ActionRequestBuilder< + RequestBuilder extends InstanceShardOperationRequestBuilder> extends ActionRequestLazyBuilder< Request, Response> { + private String index; + private TimeValue timeout; + private String timeoutString; - protected InstanceShardOperationRequestBuilder(ElasticsearchClient client, ActionType action, Request request) { - super(client, action, request); + protected InstanceShardOperationRequestBuilder(ElasticsearchClient client, ActionType action) { + super(client, action); } @SuppressWarnings("unchecked") - public final RequestBuilder setIndex(String index) { - request.index(index); + public RequestBuilder setIndex(String index) { + this.index = index; return (RequestBuilder) this; } + protected String getIndex() { + return index; + } + /** * A timeout to wait if the index operation can't be performed immediately. Defaults to {@code 1m}. */ @SuppressWarnings("unchecked") - public final RequestBuilder setTimeout(TimeValue timeout) { - request.timeout(timeout); + public RequestBuilder setTimeout(TimeValue timeout) { + this.timeout = timeout; return (RequestBuilder) this; } @@ -44,8 +51,26 @@ public final RequestBuilder setTimeout(TimeValue timeout) { * A timeout to wait if the index operation can't be performed immediately. Defaults to {@code 1m}. 
*/ @SuppressWarnings("unchecked") - public final RequestBuilder setTimeout(String timeout) { - request.timeout(timeout); + public RequestBuilder setTimeout(String timeout) { + this.timeoutString = timeout; return (RequestBuilder) this; } + + protected void apply(Request request) { + if (index != null) { + request.index(index); + } + if (timeout != null) { + request.timeout(timeout); + } + if (timeoutString != null) { + request.timeout(timeoutString); + } + } + + protected void validate() throws IllegalStateException { + if (timeoutString != null && timeout != null) { + throw new IllegalStateException("Must use only one setTimeout method"); + } + } } diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java index 88bed844558f2..cbf28d6718594 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequestBuilder; @@ -26,19 +27,65 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder { + private String id; + private String routing; + private Script script; + + private String fetchSourceInclude; + private String fetchSourceExclude; + private String[] fetchSourceIncludeArray; + private String[] fetchSourceExcludeArray; + private Boolean fetchSource; + + private Integer retryOnConflict; + private Long version; + private VersionType versionType; + private Long ifSeqNo; + private Long ifPrimaryTerm; + private ActiveShardCount waitForActiveShards; + + private IndexRequest doc; + private XContentBuilder docSourceXContentBuilder; + private Map docSourceMap; + private XContentType docSourceXContentType; + private String docSourceString; + private byte[] docSourceBytes; + private Integer docSourceOffset; + private Integer docSourceLength; + private Object[] docSourceArray; + + private IndexRequest upsert; + private XContentBuilder upsertSourceXContentBuilder; + private Map upsertSourceMap; + private XContentType upsertSourceXContentType; + private String upsertSourceString; + private byte[] upsertSourceBytes; + private Integer upsertSourceOffset; + private Integer upsertSourceLength; + private Object[] upsertSourceArray; + + private Boolean docAsUpsert; + private Boolean detectNoop; + private Boolean scriptedUpsert; + private Boolean requireAlias; + private WriteRequest.RefreshPolicy refreshPolicy; + private String refreshPolicyString; + public UpdateRequestBuilder(ElasticsearchClient client) { - super(client, TransportUpdateAction.TYPE, new UpdateRequest()); + this(client, null, null); } public UpdateRequestBuilder(ElasticsearchClient client, String index, String id) { - super(client, TransportUpdateAction.TYPE, new UpdateRequest(index, id)); + super(client, TransportUpdateAction.TYPE); + setIndex(index); + setId(id); } /** * Sets the id of the indexed document. */ public UpdateRequestBuilder setId(String id) { - request.id(id); + this.id = id; return this; } @@ -47,7 +94,7 @@ public UpdateRequestBuilder setId(String id) { * and not the id. 
*/ public UpdateRequestBuilder setRouting(String routing) { - request.routing(routing); + this.routing = routing; return this; } @@ -60,7 +107,7 @@ public UpdateRequestBuilder setRouting(String routing) { * */ public UpdateRequestBuilder setScript(Script script) { - request.script(script); + this.script = script; return this; } @@ -77,7 +124,8 @@ public UpdateRequestBuilder setScript(Script script) { * the returned _source */ public UpdateRequestBuilder setFetchSource(@Nullable String include, @Nullable String exclude) { - request.fetchSource(include, exclude); + this.fetchSourceInclude = include; + this.fetchSourceExclude = exclude; return this; } @@ -94,7 +142,8 @@ public UpdateRequestBuilder setFetchSource(@Nullable String include, @Nullable S * filter the returned _source */ public UpdateRequestBuilder setFetchSource(@Nullable String[] includes, @Nullable String[] excludes) { - request.fetchSource(includes, excludes); + this.fetchSourceIncludeArray = includes; + this.fetchSourceExcludeArray = excludes; return this; } @@ -102,7 +151,7 @@ public UpdateRequestBuilder setFetchSource(@Nullable String[] includes, @Nullabl * Indicates whether the response should contain the updated _source. */ public UpdateRequestBuilder setFetchSource(boolean fetchSource) { - request.fetchSource(fetchSource); + this.fetchSource = fetchSource; return this; } @@ -111,7 +160,7 @@ public UpdateRequestBuilder setFetchSource(boolean fetchSource) { * getting it and updating it. Defaults to 0. */ public UpdateRequestBuilder setRetryOnConflict(int retryOnConflict) { - request.retryOnConflict(retryOnConflict); + this.retryOnConflict = retryOnConflict; return this; } @@ -120,7 +169,7 @@ public UpdateRequestBuilder setRetryOnConflict(int retryOnConflict) { * version exists and no changes happened on the doc since then. */ public UpdateRequestBuilder setVersion(long version) { - request.version(version); + this.version = version; return this; } @@ -128,7 +177,7 @@ public UpdateRequestBuilder setVersion(long version) { * Sets the versioning type. Defaults to {@link org.elasticsearch.index.VersionType#INTERNAL}. */ public UpdateRequestBuilder setVersionType(VersionType versionType) { - request.versionType(versionType); + this.versionType = versionType; return this; } @@ -140,7 +189,7 @@ public UpdateRequestBuilder setVersionType(VersionType versionType) { * {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown. */ public UpdateRequestBuilder setIfSeqNo(long seqNo) { - request.setIfSeqNo(seqNo); + this.ifSeqNo = seqNo; return this; } @@ -152,7 +201,7 @@ public UpdateRequestBuilder setIfSeqNo(long seqNo) { * {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown. */ public UpdateRequestBuilder setIfPrimaryTerm(long term) { - request.setIfPrimaryTerm(term); + this.ifPrimaryTerm = term; return this; } @@ -161,7 +210,7 @@ public UpdateRequestBuilder setIfPrimaryTerm(long term) { * See {@link ReplicationRequest#waitForActiveShards(ActiveShardCount)} for details. */ public UpdateRequestBuilder setWaitForActiveShards(ActiveShardCount waitForActiveShards) { - request.waitForActiveShards(waitForActiveShards); + this.waitForActiveShards = waitForActiveShards; return this; } @@ -178,7 +227,7 @@ public UpdateRequestBuilder setWaitForActiveShards(final int waitForActiveShards * Sets the doc to use for updates when a script is not specified. 
*/ public UpdateRequestBuilder setDoc(IndexRequest indexRequest) { - request.doc(indexRequest); + this.doc = indexRequest; return this; } @@ -186,7 +235,7 @@ public UpdateRequestBuilder setDoc(IndexRequest indexRequest) { * Sets the doc to use for updates when a script is not specified. */ public UpdateRequestBuilder setDoc(XContentBuilder source) { - request.doc(source); + this.docSourceXContentBuilder = source; return this; } @@ -194,7 +243,7 @@ public UpdateRequestBuilder setDoc(XContentBuilder source) { * Sets the doc to use for updates when a script is not specified. */ public UpdateRequestBuilder setDoc(Map source) { - request.doc(source); + this.docSourceMap = source; return this; } @@ -202,7 +251,8 @@ public UpdateRequestBuilder setDoc(Map source) { * Sets the doc to use for updates when a script is not specified. */ public UpdateRequestBuilder setDoc(Map source, XContentType contentType) { - request.doc(source, contentType); + this.docSourceMap = source; + this.docSourceXContentType = contentType; return this; } @@ -210,7 +260,8 @@ public UpdateRequestBuilder setDoc(Map source, XContentType cont * Sets the doc to use for updates when a script is not specified. */ public UpdateRequestBuilder setDoc(String source, XContentType xContentType) { - request.doc(source, xContentType); + this.docSourceString = source; + this.docSourceXContentType = xContentType; return this; } @@ -218,7 +269,8 @@ public UpdateRequestBuilder setDoc(String source, XContentType xContentType) { * Sets the doc to use for updates when a script is not specified. */ public UpdateRequestBuilder setDoc(byte[] source, XContentType xContentType) { - request.doc(source, xContentType); + this.docSourceBytes = source; + this.docSourceXContentType = xContentType; return this; } @@ -226,7 +278,10 @@ public UpdateRequestBuilder setDoc(byte[] source, XContentType xContentType) { * Sets the doc to use for updates when a script is not specified. */ public UpdateRequestBuilder setDoc(byte[] source, int offset, int length, XContentType xContentType) { - request.doc(source, offset, length, xContentType); + this.docSourceBytes = source; + this.docSourceOffset = offset; + this.docSourceLength = length; + this.docSourceXContentType = xContentType; return this; } @@ -235,7 +290,7 @@ public UpdateRequestBuilder setDoc(byte[] source, int offset, int length, XConte * is a field and value pairs. */ public UpdateRequestBuilder setDoc(Object... source) { - request.doc(source); + this.docSourceArray = source; return this; } @@ -244,7 +299,8 @@ public UpdateRequestBuilder setDoc(Object... source) { * is a field and value pairs. */ public UpdateRequestBuilder setDoc(XContentType xContentType, Object... source) { - request.doc(xContentType, source); + this.docSourceArray = source; + this.docSourceXContentType = xContentType; return this; } @@ -253,7 +309,7 @@ public UpdateRequestBuilder setDoc(XContentType xContentType, Object... source) * {@link org.elasticsearch.index.engine.DocumentMissingException} is thrown. */ public UpdateRequestBuilder setUpsert(IndexRequest indexRequest) { - request.upsert(indexRequest); + this.upsert = indexRequest; return this; } @@ -261,7 +317,7 @@ public UpdateRequestBuilder setUpsert(IndexRequest indexRequest) { * Sets the doc source of the update request to be used when the document does not exists. 
*/ public UpdateRequestBuilder setUpsert(XContentBuilder source) { - request.upsert(source); + this.upsertSourceXContentBuilder = source; return this; } @@ -269,7 +325,7 @@ public UpdateRequestBuilder setUpsert(XContentBuilder source) { * Sets the doc source of the update request to be used when the document does not exists. */ public UpdateRequestBuilder setUpsert(Map source) { - request.upsert(source); + this.upsertSourceMap = source; return this; } @@ -277,7 +333,8 @@ public UpdateRequestBuilder setUpsert(Map source) { * Sets the doc source of the update request to be used when the document does not exists. */ public UpdateRequestBuilder setUpsert(Map source, XContentType contentType) { - request.upsert(source, contentType); + this.upsertSourceMap = source; + this.upsertSourceXContentType = contentType; return this; } @@ -285,7 +342,8 @@ public UpdateRequestBuilder setUpsert(Map source, XContentType c * Sets the doc source of the update request to be used when the document does not exists. */ public UpdateRequestBuilder setUpsert(String source, XContentType xContentType) { - request.upsert(source, xContentType); + this.upsertSourceString = source; + this.upsertSourceXContentType = xContentType; return this; } @@ -293,7 +351,8 @@ public UpdateRequestBuilder setUpsert(String source, XContentType xContentType) * Sets the doc source of the update request to be used when the document does not exists. */ public UpdateRequestBuilder setUpsert(byte[] source, XContentType xContentType) { - request.upsert(source, xContentType); + this.upsertSourceBytes = source; + this.upsertSourceXContentType = xContentType; return this; } @@ -301,7 +360,10 @@ public UpdateRequestBuilder setUpsert(byte[] source, XContentType xContentType) * Sets the doc source of the update request to be used when the document does not exists. */ public UpdateRequestBuilder setUpsert(byte[] source, int offset, int length, XContentType xContentType) { - request.upsert(source, offset, length, xContentType); + this.upsertSourceBytes = source; + this.upsertSourceOffset = offset; + this.upsertSourceLength = length; + this.upsertSourceXContentType = xContentType; return this; } @@ -310,7 +372,7 @@ public UpdateRequestBuilder setUpsert(byte[] source, int offset, int length, XCo * includes field and value pairs. */ public UpdateRequestBuilder setUpsert(Object... source) { - request.upsert(source); + this.upsertSourceArray = source; return this; } @@ -319,7 +381,8 @@ public UpdateRequestBuilder setUpsert(Object... source) { * includes field and value pairs. */ public UpdateRequestBuilder setUpsert(XContentType xContentType, Object... source) { - request.upsert(xContentType, source); + this.upsertSourceArray = source; + this.upsertSourceXContentType = xContentType; return this; } @@ -327,7 +390,7 @@ public UpdateRequestBuilder setUpsert(XContentType xContentType, Object... sourc * Sets whether the specified doc parameter should be used as upsert document. */ public UpdateRequestBuilder setDocAsUpsert(boolean shouldUpsertDoc) { - request.docAsUpsert(shouldUpsertDoc); + this.docAsUpsert = shouldUpsertDoc; return this; } @@ -336,7 +399,7 @@ public UpdateRequestBuilder setDocAsUpsert(boolean shouldUpsertDoc) { * Defaults to true. 
*/ public UpdateRequestBuilder setDetectNoop(boolean detectNoop) { - request.detectNoop(detectNoop); + this.detectNoop = detectNoop; return this; } @@ -344,7 +407,7 @@ public UpdateRequestBuilder setDetectNoop(boolean detectNoop) { * Sets whether the script should be run in the case of an insert */ public UpdateRequestBuilder setScriptedUpsert(boolean scriptedUpsert) { - request.scriptedUpsert(scriptedUpsert); + this.scriptedUpsert = scriptedUpsert; return this; } @@ -352,7 +415,187 @@ public UpdateRequestBuilder setScriptedUpsert(boolean scriptedUpsert) { * Sets the require_alias flag */ public UpdateRequestBuilder setRequireAlias(boolean requireAlias) { - request.setRequireAlias(requireAlias); - return this; + this.requireAlias = requireAlias; + return this; + } + + @Override + public UpdateRequestBuilder setRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) { + this.refreshPolicy = refreshPolicy; + return this; + } + + @Override + public UpdateRequestBuilder setRefreshPolicy(String refreshPolicy) { + this.refreshPolicyString = refreshPolicy; + return this; + } + + @Override + public UpdateRequest request() { + validate(); + UpdateRequest request = new UpdateRequest(); + super.apply(request); + if (id != null) { + request.id(id); + } + if (routing != null) { + request.routing(routing); + } + if (script != null) { + request.script(script); + } + if (fetchSourceInclude != null || fetchSourceExclude != null) { + request.fetchSource(fetchSourceInclude, fetchSourceExclude); + } + if (fetchSourceIncludeArray != null || fetchSourceExcludeArray != null) { + request.fetchSource(fetchSourceIncludeArray, fetchSourceExcludeArray); + } + if (fetchSource != null) { + request.fetchSource(fetchSource); + } + if (retryOnConflict != null) { + request.retryOnConflict(retryOnConflict); + } + if (version != null) { + request.version(version); + } + if (versionType != null) { + request.versionType(versionType); + } + if (ifSeqNo != null) { + request.setIfSeqNo(ifSeqNo); + } + if (ifPrimaryTerm != null) { + request.setIfPrimaryTerm(ifPrimaryTerm); + } + if (waitForActiveShards != null) { + request.waitForActiveShards(waitForActiveShards); + } + if (doc != null) { + request.doc(doc); + } + if (docSourceXContentBuilder != null) { + request.doc(docSourceXContentBuilder); + } + if (docSourceMap != null) { + if (docSourceXContentType == null) { + request.doc(docSourceMap); + } else { + request.doc(docSourceMap, docSourceXContentType); + } + } + if (docSourceString != null && docSourceXContentType != null) { + request.doc(docSourceString, docSourceXContentType); + } + if (docSourceBytes != null && docSourceXContentType != null) { + if (docSourceOffset != null && docSourceLength != null) { + request.doc(docSourceBytes, docSourceOffset, docSourceLength, docSourceXContentType); + } else { + request.doc(docSourceBytes, docSourceXContentType); + } + } + if (docSourceArray != null) { + if (docSourceXContentType == null) { + request.doc(docSourceArray); + } else { + request.doc(docSourceXContentType, docSourceArray); + } + } + if (upsert != null) { + request.upsert(upsert); + } + if (upsertSourceXContentBuilder != null) { + request.upsert(upsertSourceXContentBuilder); + } + if (upsertSourceMap != null) { + if (upsertSourceXContentType == null) { + request.upsert(upsertSourceMap); + } else { + request.upsert(upsertSourceMap, upsertSourceXContentType); + } + } + if (upsertSourceString != null && upsertSourceXContentType != null) { + request.upsert(upsertSourceString, upsertSourceXContentType); + } + if (upsertSourceBytes != null && upsertSourceXContentType != null) { + if 
(upsertSourceOffset != null && upsertSourceLength != null) { + request.upsert(upsertSourceBytes, upsertSourceOffset, upsertSourceLength, upsertSourceXContentType); + } else { + request.upsert(upsertSourceBytes, upsertSourceXContentType); + } + } + if (upsertSourceArray != null) { + if (upsertSourceXContentType == null) { + request.upsert(upsertSourceArray); + } else { + request.upsert(upsertSourceXContentType, upsertSourceArray); + } + } + if (docAsUpsert != null) { + request.docAsUpsert(docAsUpsert); + } + if (detectNoop != null) { + request.detectNoop(detectNoop); + } + if (scriptedUpsert != null) { + request.scriptedUpsert(scriptedUpsert); + } + if (requireAlias != null) { + request.setRequireAlias(requireAlias); + } + if (refreshPolicy != null) { + request.setRefreshPolicy(refreshPolicy); + } + if (refreshPolicyString != null) { + request.setRefreshPolicy(refreshPolicyString); + } + return request; + } + + @Override + protected void validate() throws IllegalStateException { + super.validate(); + boolean fetchIncludeExcludeNotNull = fetchSourceInclude != null || fetchSourceExclude != null; + boolean fetchIncludeExcludeArrayNotNull = fetchSourceIncludeArray != null || fetchSourceExcludeArray != null; + boolean fetchSourceNotNull = fetchSource != null; + if ((fetchIncludeExcludeNotNull && fetchIncludeExcludeArrayNotNull) + || (fetchIncludeExcludeNotNull && fetchSourceNotNull) + || (fetchIncludeExcludeArrayNotNull && fetchSourceNotNull)) { + throw new IllegalStateException("Only one fetchSource() method may be called"); + } + int docSourceFieldsSet = countDocSourceFieldsSet(); + if (docSourceFieldsSet > 1) { + throw new IllegalStateException("Only one setDoc() method may be called, but " + docSourceFieldsSet + " have been"); + } + int upsertSourceFieldsSet = countUpsertSourceFieldsSet(); + if (upsertSourceFieldsSet > 1) { + throw new IllegalStateException("Only one setUpsert() method may be called, but " + upsertSourceFieldsSet + " have been"); + } + } + + private int countDocSourceFieldsSet() { + return countNonNullObjects(doc, docSourceXContentBuilder, docSourceMap, docSourceString, docSourceBytes, docSourceArray); + } + + private int countUpsertSourceFieldsSet() { + return countNonNullObjects( + upsert, + upsertSourceXContentBuilder, + upsertSourceMap, + upsertSourceString, + upsertSourceBytes, + upsertSourceArray + ); + } + + private int countNonNullObjects(Object... 
objects) { + int sum = 0; + for (Object object : objects) { + if (object != null) { + sum++; + } + } + return sum; } } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java index ffafc1be6a7ba..0aeb64d5b250f 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java @@ -8,7 +8,7 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestLazyBuilder; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.support.ActiveShardCount; @@ -16,20 +16,44 @@ import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.search.builder.SearchSourceBuilder; + +import static org.elasticsearch.index.reindex.AbstractBulkByScrollRequest.DEFAULT_SCROLL_SIZE; +import static org.elasticsearch.index.reindex.AbstractBulkByScrollRequest.DEFAULT_SCROLL_TIMEOUT; public abstract class AbstractBulkByScrollRequestBuilder< Request extends AbstractBulkByScrollRequest, - Self extends AbstractBulkByScrollRequestBuilder> extends ActionRequestBuilder { + Self extends AbstractBulkByScrollRequestBuilder> extends ActionRequestLazyBuilder { private final SearchRequestBuilder source; + private Integer maxDocs; + private Boolean abortOnVersionConflict; + private Boolean refresh; + private TimeValue timeout; + private ActiveShardCount waitForActiveShards; + private TimeValue retryBackoffInitialTime; + private Integer maxRetries; + private Float requestsPerSecond; + private Boolean shouldStoreResult; + private Integer slices; protected AbstractBulkByScrollRequestBuilder( ElasticsearchClient client, ActionType action, - SearchRequestBuilder source, - Request request + SearchRequestBuilder source ) { - super(client, action, request); + super(client, action); this.source = source; + initSourceSearchRequest(); + } + + /* + * The following is normally done within the AbstractBulkByScrollRequest constructor. But that constructor is not called until the + * request() method is called once this builder is complete. Doing it there blows away changes made to the source request. + */ + private void initSourceSearchRequest() { + source.request().scroll(DEFAULT_SCROLL_TIMEOUT); + source.request().source(new SearchSourceBuilder()); + source.request().source().size(DEFAULT_SCROLL_SIZE); } protected abstract Self self(); @@ -73,7 +97,7 @@ public Self size(int size) { * documents. */ public Self maxDocs(int maxDocs) { - request.setMaxDocs(maxDocs); + this.maxDocs = maxDocs; return self(); } @@ -81,7 +105,7 @@ public Self maxDocs(int maxDocs) { * Set whether or not version conflicts cause the action to abort. */ public Self abortOnVersionConflict(boolean abortOnVersionConflict) { - request.setAbortOnVersionConflict(abortOnVersionConflict); + this.abortOnVersionConflict = abortOnVersionConflict; return self(); } @@ -89,7 +113,7 @@ public Self abortOnVersionConflict(boolean abortOnVersionConflict) { * Call refresh on the indexes we've written to after the request ends? 
*/ public Self refresh(boolean refresh) { - request.setRefresh(refresh); + this.refresh = refresh; return self(); } @@ -97,7 +121,7 @@ public Self refresh(boolean refresh) { * Timeout to wait for the shards on to be available for each bulk request. */ public Self timeout(TimeValue timeout) { - request.setTimeout(timeout); + this.timeout = timeout; return self(); } @@ -106,7 +130,7 @@ public Self timeout(TimeValue timeout) { * See {@link ReplicationRequest#waitForActiveShards(ActiveShardCount)} for details. */ public Self waitForActiveShards(ActiveShardCount activeShardCount) { - request.setWaitForActiveShards(activeShardCount); + this.waitForActiveShards = activeShardCount; return self(); } @@ -115,7 +139,7 @@ public Self waitForActiveShards(ActiveShardCount activeShardCount) { * is about one minute per bulk request. Once the entire bulk request is successful the retry counter resets. */ public Self setRetryBackoffInitialTime(TimeValue retryBackoffInitialTime) { - request.setRetryBackoffInitialTime(retryBackoffInitialTime); + this.retryBackoffInitialTime = retryBackoffInitialTime; return self(); } @@ -123,7 +147,7 @@ public Self setRetryBackoffInitialTime(TimeValue retryBackoffInitialTime) { * Total number of retries attempted for rejections. There is no way to ask for unlimited retries. */ public Self setMaxRetries(int maxRetries) { - request.setMaxRetries(maxRetries); + this.maxRetries = maxRetries; return self(); } @@ -133,7 +157,7 @@ public Self setMaxRetries(int maxRetries) { * make sure that it contains any time that we might wait. */ public Self setRequestsPerSecond(float requestsPerSecond) { - request.setRequestsPerSecond(requestsPerSecond); + this.requestsPerSecond = requestsPerSecond; return self(); } @@ -141,7 +165,7 @@ public Self setRequestsPerSecond(float requestsPerSecond) { * Should this task store its result after it has finished? */ public Self setShouldStoreResult(boolean shouldStoreResult) { - request.setShouldStoreResult(shouldStoreResult); + this.shouldStoreResult = shouldStoreResult; return self(); } @@ -149,7 +173,40 @@ public Self setShouldStoreResult(boolean shouldStoreResult) { * The number of slices this task should be divided into. Defaults to 1 meaning the task isn't sliced into subtasks. 
*/ public Self setSlices(int slices) { - request.setSlices(slices); + this.slices = slices; return self(); } + + protected void apply(Request request) { + if (maxDocs != null) { + request.setMaxDocs(maxDocs); + } + if (abortOnVersionConflict != null) { + request.setAbortOnVersionConflict(abortOnVersionConflict); + } + if (refresh != null) { + request.setRefresh(refresh); + } + if (timeout != null) { + request.setTimeout(timeout); + } + if (waitForActiveShards != null) { + request.setWaitForActiveShards(waitForActiveShards); + } + if (retryBackoffInitialTime != null) { + request.setRetryBackoffInitialTime(retryBackoffInitialTime); + } + if (maxRetries != null) { + request.setMaxRetries(maxRetries); + } + if (requestsPerSecond != null) { + request.setRequestsPerSecond(requestsPerSecond); + } + if (shouldStoreResult != null) { + request.setShouldStoreResult(shouldStoreResult); + } + if (slices != null) { + request.setSlices(slices); + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequestBuilder.java index 53e878b643517..30114b1472dd5 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequestBuilder.java @@ -16,21 +16,29 @@ public abstract class AbstractBulkIndexByScrollRequestBuilder< Request extends AbstractBulkIndexByScrollRequest, Self extends AbstractBulkIndexByScrollRequestBuilder> extends AbstractBulkByScrollRequestBuilder { + private Script script; protected AbstractBulkIndexByScrollRequestBuilder( ElasticsearchClient client, ActionType action, - SearchRequestBuilder search, - Request request + SearchRequestBuilder search ) { - super(client, action, search, request); + super(client, action, search); } /** * Script to modify the documents before they are processed. 
*/ public Self script(Script script) { - request.setScript(script); + this.script = script; return self(); } + + @Override + public void apply(Request request) { + super.apply(request); + if (script != null) { + request.setScript(script); + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java index 85424c2eef7d2..6243859ec0e33 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequest.java @@ -60,7 +60,7 @@ public DeleteByQueryRequest(StreamInput in) throws IOException { super(in); } - private DeleteByQueryRequest(SearchRequest search, boolean setDefaults) { + DeleteByQueryRequest(SearchRequest search, boolean setDefaults) { super(search, setDefaults); // Delete-By-Query does not require the source if (setDefaults) { diff --git a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java index 49d3c660a4b68..3452c6659a392 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java @@ -8,17 +8,21 @@ package org.elasticsearch.index.reindex; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; public class DeleteByQueryRequestBuilder extends AbstractBulkByScrollRequestBuilder { + private Boolean abortOnVersionConflict; + public DeleteByQueryRequestBuilder(ElasticsearchClient client) { this(client, new SearchRequestBuilder(client)); } private DeleteByQueryRequestBuilder(ElasticsearchClient client, SearchRequestBuilder search) { - super(client, DeleteByQueryAction.INSTANCE, search, new DeleteByQueryRequest(search.request())); + super(client, DeleteByQueryAction.INSTANCE, search); + source().setFetchSource(false); } @Override @@ -28,7 +32,33 @@ protected DeleteByQueryRequestBuilder self() { @Override public DeleteByQueryRequestBuilder abortOnVersionConflict(boolean abortOnVersionConflict) { - request.setAbortOnVersionConflict(abortOnVersionConflict); + this.abortOnVersionConflict = abortOnVersionConflict; return this; } + + @Override + public DeleteByQueryRequest request() { + SearchRequest search = source().request(); + try { + DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(search, false); + try { + apply(deleteByQueryRequest); + return deleteByQueryRequest; + } catch (Exception e) { + deleteByQueryRequest.decRef(); + throw e; + } + } catch (Exception e) { + search.decRef(); + throw e; + } + } + + @Override + public void apply(DeleteByQueryRequest request) { + super.apply(request); + if (abortOnVersionConflict != null) { + request.setAbortOnVersionConflict(abortOnVersionConflict); + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java index a1f741d7d51d6..683ec75c57d76 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java @@ -68,7 +68,7 @@ public ReindexRequest() { this(search, destination, true); } - private ReindexRequest(SearchRequest search, 
IndexRequest destination, boolean setDefaults) { + ReindexRequest(SearchRequest search, IndexRequest destination, boolean setDefaults) { super(search, setDefaults); this.destination = destination; } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java index 88a851bee15e0..156b39d608654 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java @@ -8,20 +8,23 @@ package org.elasticsearch.index.reindex; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; public class ReindexRequestBuilder extends AbstractBulkIndexByScrollRequestBuilder { - private final IndexRequestBuilder destination; + private final IndexRequestBuilder destinationBuilder; + private RemoteInfo remoteInfo; public ReindexRequestBuilder(ElasticsearchClient client) { this(client, new SearchRequestBuilder(client), new IndexRequestBuilder(client)); } private ReindexRequestBuilder(ElasticsearchClient client, SearchRequestBuilder search, IndexRequestBuilder destination) { - super(client, ReindexAction.INSTANCE, search, new ReindexRequest(search.request(), destination.request())); - this.destination = destination; + super(client, ReindexAction.INSTANCE, search); + this.destinationBuilder = destination; } @Override @@ -30,14 +33,14 @@ protected ReindexRequestBuilder self() { } public IndexRequestBuilder destination() { - return destination; + return destinationBuilder; } /** * Set the destination index. */ public ReindexRequestBuilder destination(String index) { - destination.setIndex(index); + destinationBuilder.setIndex(index); return this; } @@ -45,7 +48,34 @@ public ReindexRequestBuilder destination(String index) { * Setup reindexing from a remote cluster. 
*/ public ReindexRequestBuilder setRemoteInfo(RemoteInfo remoteInfo) { - request().setRemoteInfo(remoteInfo); + this.remoteInfo = remoteInfo; return this; } + + @Override + public ReindexRequest request() { + SearchRequest source = source().request(); + try { + IndexRequest destination = destinationBuilder.request(); + try { + ReindexRequest reindexRequest = new ReindexRequest(source, destination, false); + try { + super.apply(reindexRequest); + if (remoteInfo != null) { + reindexRequest.setRemoteInfo(remoteInfo); + } + return reindexRequest; + } catch (Exception e) { + reindexRequest.decRef(); + throw e; + } + } catch (Exception e) { + destination.decRef(); + throw e; + } + } catch (Exception e) { + source.decRef(); + throw e; + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequest.java b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequest.java index d30b54fdafd42..44b959074ed76 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequest.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequest.java @@ -52,7 +52,7 @@ public UpdateByQueryRequest(StreamInput in) throws IOException { pipeline = in.readOptionalString(); } - private UpdateByQueryRequest(SearchRequest search, boolean setDefaults) { + UpdateByQueryRequest(SearchRequest search, boolean setDefaults) { super(search, setDefaults); } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java index b63ebdf1def86..270014d6ab3f2 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.index.reindex; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; @@ -15,12 +16,15 @@ public class UpdateByQueryRequestBuilder extends AbstractBulkIndexByScrollReques UpdateByQueryRequest, UpdateByQueryRequestBuilder> { + private Boolean abortOnVersionConflict; + private String pipeline; + public UpdateByQueryRequestBuilder(ElasticsearchClient client) { this(client, new SearchRequestBuilder(client)); } private UpdateByQueryRequestBuilder(ElasticsearchClient client, SearchRequestBuilder search) { - super(client, UpdateByQueryAction.INSTANCE, search, new UpdateByQueryRequest(search.request())); + super(client, UpdateByQueryAction.INSTANCE, search); } @Override @@ -30,12 +34,41 @@ protected UpdateByQueryRequestBuilder self() { @Override public UpdateByQueryRequestBuilder abortOnVersionConflict(boolean abortOnVersionConflict) { - request.setAbortOnVersionConflict(abortOnVersionConflict); + this.abortOnVersionConflict = abortOnVersionConflict; return this; } public UpdateByQueryRequestBuilder setPipeline(String pipeline) { - request.setPipeline(pipeline); + this.pipeline = pipeline; return this; } + + @Override + public UpdateByQueryRequest request() { + SearchRequest search = source().request(); + try { + UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest(search, false); + try { + apply(updateByQueryRequest); + return updateByQueryRequest; + } catch (Exception e) { + updateByQueryRequest.decRef(); + throw e; + } + } catch (Exception e) { + search.decRef(); + throw e; + } + } + + @Override + public void 
apply(UpdateByQueryRequest request) { + super.apply(request); + if (abortOnVersionConflict != null) { + request.setAbortOnVersionConflict(abortOnVersionConflict); + } + if (pipeline != null) { + request.setPipeline(pipeline); + } + } } diff --git a/server/src/test/java/org/elasticsearch/action/delete/DeleteRequestBuilderTests.java b/server/src/test/java/org/elasticsearch/action/delete/DeleteRequestBuilderTests.java new file mode 100644 index 0000000000000..0a59dac833ca9 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/delete/DeleteRequestBuilderTests.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.delete; + +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; + +public class DeleteRequestBuilderTests extends ESTestCase { + + public void testValidation() { + DeleteRequestBuilder deleteRequestBuilder = new DeleteRequestBuilder(null, randomAlphaOfLength(10)); + deleteRequestBuilder.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values()).toString()); + deleteRequestBuilder.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); + expectThrows(IllegalStateException.class, deleteRequestBuilder::request); + + deleteRequestBuilder = new DeleteRequestBuilder(null, randomAlphaOfLength(10)); + deleteRequestBuilder.setTimeout(randomTimeValue()); + deleteRequestBuilder.setTimeout(TimeValue.timeValueSeconds(randomIntBetween(1, 30))); + expectThrows(IllegalStateException.class, deleteRequestBuilder::request); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java b/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java index 9af522524abc9..778bd6a1d138e 100644 --- a/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.index; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.TestThreadPool; @@ -82,4 +83,20 @@ public void testSetSource() throws Exception { indexRequestBuilder.setSource(doc); assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); } + + public void testValidation() { + IndexRequestBuilder indexRequestBuilder = new IndexRequestBuilder(this.testClient); + Map source = new HashMap<>(); + source.put("SomeKey", "SomeValue"); + indexRequestBuilder.setSource(source); + assertNotNull(indexRequestBuilder.request()); + indexRequestBuilder.setSource("SomeKey", "SomeValue"); + expectThrows(IllegalStateException.class, indexRequestBuilder::request); + + indexRequestBuilder = new IndexRequestBuilder(this.testClient); + indexRequestBuilder.setTimeout(randomTimeValue()); + assertNotNull(indexRequestBuilder.request()); + indexRequestBuilder.setTimeout(TimeValue.timeValueSeconds(randomIntBetween(1, 30))); + expectThrows(IllegalStateException.class, 
indexRequestBuilder::request); + } } diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestBuilderTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestBuilderTests.java new file mode 100644 index 0000000000000..8091daed2b5b0 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestBuilderTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.update; + +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; + +import java.util.Map; + +public class UpdateRequestBuilderTests extends ESTestCase { + + public void testValidation() { + UpdateRequestBuilder updateRequestBuilder = new UpdateRequestBuilder(null); + updateRequestBuilder.setFetchSource(randomAlphaOfLength(10), randomAlphaOfLength(10)); + updateRequestBuilder.setFetchSource(true); + expectThrows(IllegalStateException.class, updateRequestBuilder::request); + + updateRequestBuilder = new UpdateRequestBuilder(null); + updateRequestBuilder.setTimeout(randomTimeValue()); + updateRequestBuilder.setTimeout(TimeValue.timeValueSeconds(randomIntBetween(1, 30))); + expectThrows(IllegalStateException.class, updateRequestBuilder::request); + + updateRequestBuilder = new UpdateRequestBuilder(null); + updateRequestBuilder.setDoc("key", "value"); + updateRequestBuilder.setDoc(Map.of("key", "value")); + expectThrows(IllegalStateException.class, updateRequestBuilder::request); + + updateRequestBuilder = new UpdateRequestBuilder(null); + updateRequestBuilder.setUpsert("key", "value"); + updateRequestBuilder.setUpsert(Map.of("key", "value")); + expectThrows(IllegalStateException.class, updateRequestBuilder::request); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 65b28ad874431..82e02ec1db70f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1649,7 +1649,7 @@ public void indexRandom(boolean forceRefresh, boolean dummyDocuments, boolean ma Set indices = new HashSet<>(); builders = new ArrayList<>(builders); for (IndexRequestBuilder builder : builders) { - indices.add(builder.request().index()); + indices.add(builder.getIndex()); } Set> bogusIds = new HashSet<>(); // (index, type, id) if (random.nextBoolean() && builders.isEmpty() == false && dummyDocuments) { From 40a61abb952e0cb2f3c49124a2af7ad1029b40e1 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Sat, 3 Feb 2024 18:34:03 -0800 Subject: [PATCH 007/106] Awaits fix #105104 --- .../aggregations/bucket/TimeSeriesTsidHashCardinalityIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesTsidHashCardinalityIT.java b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesTsidHashCardinalityIT.java index b1db2f8a7d3a1..278f905e3900b 100644 --- 
a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesTsidHashCardinalityIT.java +++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesTsidHashCardinalityIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.aggregations.bucket; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -43,6 +44,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105104") public class TimeSeriesTsidHashCardinalityIT extends ESSingleNodeTestCase { private static final String START_TIME = "2021-01-01T00:00:00Z"; private static final String END_TIME = "2021-12-31T23:59:59Z"; From e1488a0fc7ddc2ba01ca72f92409c8a6dd92a10c Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Sat, 3 Feb 2024 20:48:11 -0800 Subject: [PATCH 008/106] Fix compilation of example rest handler (#105101) --- .../example/resthandler/ExampleRestHandlerPlugin.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleRestHandlerPlugin.java b/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleRestHandlerPlugin.java index e142ba80147e0..a820973c19ca3 100644 --- a/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleRestHandlerPlugin.java +++ b/plugins/examples/rest-handler/src/main/java/org/elasticsearch/example/resthandler/ExampleRestHandlerPlugin.java @@ -15,12 +15,14 @@ import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import java.util.List; +import java.util.function.Predicate; import java.util.function.Supplier; import static java.util.Collections.singletonList; @@ -35,8 +37,8 @@ public List getRestHandlers(final Settings settings, final IndexScopedSettings indexScopedSettings, final SettingsFilter settingsFilter, final IndexNameExpressionResolver indexNameExpressionResolver, - final Supplier nodesInCluster) { - + final Supplier nodesInCluster, + final Predicate clusterSupportsFeature) { return singletonList(new ExampleCatAction()); } } From 552d2f563b31df1d42e2d927efc8e9b17310bc21 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Sun, 4 Feb 2024 19:21:50 +1100 Subject: [PATCH 009/106] Expose OperationPurpose via CustomQueryParameter to s3 logs (#105044) This PR adds the OperationPurpose as a custom query parameter for each S3 request so that they are available in s3 access logs. 
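As a sketch of the mechanism (assuming the AWS SDK v1 that the repository-s3 module builds on; the
parameter name "x-purpose", the helper below, and the literal purpose value are illustrative, not
necessarily the exact constants introduced by this change):

    import com.amazonaws.services.s3.model.GetObjectRequest;

    // Tag an outgoing S3 request with the purpose that triggered it. The SDK appends custom
    // query parameters to the signed request URI, so the label surfaces in the bucket's
    // server access logs next to the rest of the request details.
    static GetObjectRequest getObjectForPurpose(String bucket, String key, String purposeKey) {
        GetObjectRequest getObject = new GetObjectRequest(bucket, key);
        getObject.putCustomQueryParameter("x-purpose", purposeKey); // e.g. "SnapshotData"
        return getObject;
    }
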
Resolves: ES-7750 --- docs/changelog/105044.yaml | 5 + .../s3/S3BlobStoreRepositoryTests.java | 65 +++++++++++-- .../repositories/s3/S3BlobContainer.java | 38 ++++---- .../repositories/s3/S3BlobStore.java | 21 ++++- .../s3/S3RetryingInputStream.java | 3 +- .../s3/S3BlobContainerRetriesTests.java | 33 ++++--- .../main/java/fixture/s3/S3HttpHandler.java | 91 +++++++++++++++---- 7 files changed, 199 insertions(+), 57 deletions(-) create mode 100644 docs/changelog/105044.yaml diff --git a/docs/changelog/105044.yaml b/docs/changelog/105044.yaml new file mode 100644 index 0000000000000..5a9a11f928f98 --- /dev/null +++ b/docs/changelog/105044.yaml @@ -0,0 +1,5 @@ +pr: 105044 +summary: Expose `OperationPurpose` via `CustomQueryParameter` to s3 logs +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index 9ad2c57b7f585..1d9a5fee31d39 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -31,6 +31,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; @@ -72,6 +74,7 @@ import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; import java.util.stream.StreamSupport; @@ -246,7 +249,7 @@ public void testMetrics() throws Exception { assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); assertAcked(clusterAdmin().prepareDeleteSnapshot(repository, snapshot).get()); - final Map aggregatedMetrics = new HashMap<>(); + final Map aggregatedMetrics = new HashMap<>(); // Compare collected stats and metrics for each node and they should be the same for (var nodeName : internalCluster().getNodeNames()) { final BlobStoreRepository blobStoreRepository; @@ -293,13 +296,12 @@ public void testMetrics() throws Exception { metric.getLong(), equalTo(statsCollectors.get(statsKey).counter.sum()) ); - - aggregatedMetrics.compute(operation.getKey(), (k, v) -> v == null ? metric.getLong() : v + metric.getLong()); + aggregatedMetrics.compute(statsKey, (k, v) -> v == null ? metric.getLong() : v + metric.getLong()); }); } // Metrics number should be consistent with server side request count as well. 
- assertThat(aggregatedMetrics, equalTo(getMockRequestCounts())); + assertThat(aggregatedMetrics, equalTo(getServerMetrics())); } public void testRequestStatsWithOperationPurposes() throws IOException { @@ -423,6 +425,18 @@ public void testEnforcedCooldownPeriod() throws IOException { assertThat(repository.threadPool().relativeTimeInNanos() - beforeFastDelete, lessThan(TEST_COOLDOWN_PERIOD.getNanos())); } + private Map getServerMetrics() { + for (HttpHandler h : handlers.values()) { + while (h instanceof DelegatingHttpHandler) { + if (h instanceof S3StatsCollectorHttpHandler s3StatsCollectorHttpHandler) { + return Maps.transformValues(s3StatsCollectorHttpHandler.getMetricsCount(), AtomicLong::get); + } + h = ((DelegatingHttpHandler) h).getDelegate(); + } + } + return Collections.emptyMap(); + } + /** * S3RepositoryPlugin that allows to disable chunked encoding and to set a low threshold between single upload and multipart upload. */ @@ -525,13 +539,21 @@ protected String requestUniqueId(final HttpExchange exchange) { @SuppressForbidden(reason = "this test uses a HttpServer to emulate an S3 endpoint") protected class S3StatsCollectorHttpHandler extends HttpStatsCollectorHandler { + private final Map metricsCount = ConcurrentCollections.newConcurrentMap(); + S3StatsCollectorHttpHandler(final HttpHandler delegate) { super(delegate); } @Override public void handle(HttpExchange exchange) throws IOException { - final String request = exchange.getRequestMethod() + " " + exchange.getRequestURI(); + final S3HttpHandler.RequestComponents requestComponents = S3HttpHandler.parseRequestComponents( + S3HttpHandler.getRawRequestString(exchange) + ); + if (false == requestComponents.request().startsWith("HEAD ")) { + assertThat(requestComponents.customQueryParameters(), hasKey(S3BlobStore.CUSTOM_QUERY_PARAMETER_PURPOSE)); + } + final String request = requestComponents.request(); if (shouldFailCompleteMultipartUploadRequest.get() && Regex.simpleMatch("POST /*/*?uploadId=*", request)) { try (exchange) { drainInputStream(exchange.getRequestBody()); @@ -546,22 +568,53 @@ public void handle(HttpExchange exchange) throws IOException { } @Override - public void maybeTrack(final String request, Headers requestHeaders) { + public void maybeTrack(final String rawRequest, Headers requestHeaders) { + final S3HttpHandler.RequestComponents requestComponents = S3HttpHandler.parseRequestComponents(rawRequest); + final String request = requestComponents.request(); + final OperationPurpose purpose; + // TODO: Remove the condition once ES-7810 is resolved + if (false == request.startsWith("HEAD ")) { + purpose = OperationPurpose.parse( + requestComponents.customQueryParameters().get(S3BlobStore.CUSTOM_QUERY_PARAMETER_PURPOSE).get(0) + ); + } else { + purpose = null; + } if (Regex.simpleMatch("GET /*/?prefix=*", request)) { trackRequest("ListObjects"); + metricsCount.computeIfAbsent(new S3BlobStore.StatsKey(S3BlobStore.Operation.LIST_OBJECTS, purpose), k -> new AtomicLong()) + .incrementAndGet(); } else if (Regex.simpleMatch("GET /*/*", request)) { trackRequest("GetObject"); + metricsCount.computeIfAbsent(new S3BlobStore.StatsKey(S3BlobStore.Operation.GET_OBJECT, purpose), k -> new AtomicLong()) + .incrementAndGet(); } else if (isMultiPartUpload(request)) { trackRequest("PutMultipartObject"); + metricsCount.computeIfAbsent( + new S3BlobStore.StatsKey(S3BlobStore.Operation.PUT_MULTIPART_OBJECT, purpose), + k -> new AtomicLong() + ).incrementAndGet(); } else if (Regex.simpleMatch("PUT /*/*", request)) { 
trackRequest("PutObject"); + metricsCount.computeIfAbsent(new S3BlobStore.StatsKey(S3BlobStore.Operation.PUT_OBJECT, purpose), k -> new AtomicLong()) + .incrementAndGet(); } else if (Regex.simpleMatch("POST /*/?delete", request)) { trackRequest("DeleteObjects"); + metricsCount.computeIfAbsent(new S3BlobStore.StatsKey(S3BlobStore.Operation.DELETE_OBJECTS, purpose), k -> new AtomicLong()) + .incrementAndGet(); } else if (Regex.simpleMatch("DELETE /*/*?uploadId=*", request)) { trackRequest("AbortMultipartObject"); + metricsCount.computeIfAbsent( + new S3BlobStore.StatsKey(S3BlobStore.Operation.ABORT_MULTIPART_OBJECT, purpose), + k -> new AtomicLong() + ).incrementAndGet(); } } + Map getMetricsCount() { + return metricsCount; + } + private boolean isMultiPartUpload(String request) { return Regex.simpleMatch("POST /*/*?uploads", request) || Regex.simpleMatch("POST /*/*?*uploadId=*", request) diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index dadd15ed640c0..b70fd8e87eeef 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -90,6 +90,7 @@ class S3BlobContainer extends AbstractBlobContainer { @Override public boolean blobExists(OperationPurpose purpose, String blobName) { + // TODO: Exists request needs to be include for metrics as well, see ES-7810 try (AmazonS3Reference clientReference = blobStore.clientReference()) { return SocketAccess.doPrivileged(() -> clientReference.client().doesObjectExist(blobStore.bucket(), buildKey(blobName))); } catch (final Exception e) { @@ -207,7 +208,7 @@ protected void onCompletion() throws IOException { uploadId.get(), parts ); - complRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.PUT_MULTIPART_OBJECT, purpose)); + S3BlobStore.configureRequestForMetrics(complRequest, blobStore, Operation.PUT_MULTIPART_OBJECT, purpose); SocketAccess.doPrivilegedVoid(() -> clientReference.client().completeMultipartUpload(complRequest)); } } @@ -240,7 +241,7 @@ private UploadPartRequest createPartUploadRequest( uploadRequest.setUploadId(uploadId); uploadRequest.setPartNumber(number); uploadRequest.setInputStream(stream); - uploadRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.PUT_MULTIPART_OBJECT, purpose)); + S3BlobStore.configureRequestForMetrics(uploadRequest, blobStore, Operation.PUT_MULTIPART_OBJECT, purpose); uploadRequest.setPartSize(size); uploadRequest.setLastPart(lastPart); return uploadRequest; @@ -248,7 +249,7 @@ private UploadPartRequest createPartUploadRequest( private void abortMultiPartUpload(OperationPurpose purpose, String uploadId, String blobName) { final AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest(blobStore.bucket(), blobName, uploadId); - abortRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.ABORT_MULTIPART_OBJECT, purpose)); + S3BlobStore.configureRequestForMetrics(abortRequest, blobStore, Operation.ABORT_MULTIPART_OBJECT, purpose); try (AmazonS3Reference clientReference = blobStore.clientReference()) { SocketAccess.doPrivilegedVoid(() -> clientReference.client().abortMultipartUpload(abortRequest)); } @@ -258,7 +259,7 @@ private InitiateMultipartUploadRequest initiateMultiPartUpload(OperationPurpose final InitiateMultipartUploadRequest initRequest = new 
InitiateMultipartUploadRequest(blobStore.bucket(), blobName); initRequest.setStorageClass(blobStore.getStorageClass()); initRequest.setCannedACL(blobStore.getCannedACL()); - initRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.PUT_MULTIPART_OBJECT, purpose)); + S3BlobStore.configureRequestForMetrics(initRequest, blobStore, Operation.PUT_MULTIPART_OBJECT, purpose); if (blobStore.serverSideEncryption()) { final ObjectMetadata md = new ObjectMetadata(); md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); @@ -289,13 +290,13 @@ public DeleteResult delete(OperationPurpose purpose) throws IOException { final ObjectListing list; if (prevListing != null) { final var listNextBatchOfObjectsRequest = new ListNextBatchOfObjectsRequest(prevListing); - listNextBatchOfObjectsRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.LIST_OBJECTS, purpose)); + S3BlobStore.configureRequestForMetrics(listNextBatchOfObjectsRequest, blobStore, Operation.LIST_OBJECTS, purpose); list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(listNextBatchOfObjectsRequest)); } else { final ListObjectsRequest listObjectsRequest = new ListObjectsRequest(); listObjectsRequest.setBucketName(blobStore.bucket()); listObjectsRequest.setPrefix(keyPath); - listObjectsRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.LIST_OBJECTS, purpose)); + S3BlobStore.configureRequestForMetrics(listObjectsRequest, blobStore, Operation.LIST_OBJECTS, purpose); list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(listObjectsRequest)); } final Iterator blobNameIterator = Iterators.map(list.getObjectSummaries().iterator(), summary -> { @@ -378,7 +379,7 @@ private List executeListing( ObjectListing list; if (prevListing != null) { final var listNextBatchOfObjectsRequest = new ListNextBatchOfObjectsRequest(prevListing); - listNextBatchOfObjectsRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.LIST_OBJECTS, purpose)); + S3BlobStore.configureRequestForMetrics(listNextBatchOfObjectsRequest, blobStore, Operation.LIST_OBJECTS, purpose); list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(listNextBatchOfObjectsRequest)); } else { list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(listObjectsRequest)); @@ -394,10 +395,11 @@ private List executeListing( } private ListObjectsRequest listObjectsRequest(OperationPurpose purpose, String pathPrefix) { - return new ListObjectsRequest().withBucketName(blobStore.bucket()) + final ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(blobStore.bucket()) .withPrefix(pathPrefix) - .withDelimiter("/") - .withRequestMetricCollector(blobStore.getMetricCollector(Operation.LIST_OBJECTS, purpose)); + .withDelimiter("/"); + S3BlobStore.configureRequestForMetrics(listObjectsRequest, blobStore, Operation.LIST_OBJECTS, purpose); + return listObjectsRequest; } // exposed for tests @@ -432,7 +434,7 @@ void executeSingleUpload( final PutObjectRequest putRequest = new PutObjectRequest(s3BlobStore.bucket(), blobName, input, md); putRequest.setStorageClass(s3BlobStore.getStorageClass()); putRequest.setCannedAcl(s3BlobStore.getCannedACL()); - putRequest.setRequestMetricCollector(s3BlobStore.getMetricCollector(Operation.PUT_OBJECT, purpose)); + S3BlobStore.configureRequestForMetrics(putRequest, blobStore, Operation.PUT_OBJECT, purpose); try (AmazonS3Reference clientReference = 
s3BlobStore.clientReference()) { SocketAccess.doPrivilegedVoid(() -> { clientReference.client().putObject(putRequest); }); @@ -510,7 +512,7 @@ void executeMultipartUpload( uploadId.get(), parts ); - complRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.PUT_MULTIPART_OBJECT, purpose)); + S3BlobStore.configureRequestForMetrics(complRequest, blobStore, Operation.PUT_MULTIPART_OBJECT, purpose); SocketAccess.doPrivilegedVoid(() -> clientReference.client().completeMultipartUpload(complRequest)); success = true; @@ -708,7 +710,7 @@ private void logUploads(String description, List uploads) { private List listMultipartUploads() { final var listRequest = new ListMultipartUploadsRequest(bucket); listRequest.setPrefix(blobKey); - listRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.LIST_OBJECTS, purpose)); + S3BlobStore.configureRequestForMetrics(listRequest, blobStore, Operation.LIST_OBJECTS, purpose); try { return SocketAccess.doPrivileged(() -> client.listMultipartUploads(listRequest)).getMultipartUploads(); } catch (AmazonS3Exception e) { @@ -721,7 +723,7 @@ private List listMultipartUploads() { private String initiateMultipartUpload() { final var initiateRequest = new InitiateMultipartUploadRequest(bucket, blobKey); - initiateRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.PUT_MULTIPART_OBJECT, purpose)); + S3BlobStore.configureRequestForMetrics(initiateRequest, blobStore, Operation.PUT_MULTIPART_OBJECT, purpose); return SocketAccess.doPrivileged(() -> client.initiateMultipartUpload(initiateRequest)).getUploadId(); } @@ -734,7 +736,7 @@ private PartETag uploadPart(BytesReference updated, String uploadId) throws IOEx uploadPartRequest.setLastPart(true); uploadPartRequest.setInputStream(updated.streamInput()); uploadPartRequest.setPartSize(updated.length()); - uploadPartRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.PUT_MULTIPART_OBJECT, purpose)); + S3BlobStore.configureRequestForMetrics(uploadPartRequest, blobStore, Operation.PUT_MULTIPART_OBJECT, purpose); return SocketAccess.doPrivileged(() -> client.uploadPart(uploadPartRequest)).getPartETag(); } @@ -828,7 +830,7 @@ private void safeAbortMultipartUpload(String uploadId) { private void abortMultipartUploadIfExists(String uploadId) { try { final var request = new AbortMultipartUploadRequest(bucket, blobKey, uploadId); - request.setRequestMetricCollector(blobStore.getMetricCollector(Operation.ABORT_MULTIPART_OBJECT, purpose)); + S3BlobStore.configureRequestForMetrics(request, blobStore, Operation.ABORT_MULTIPART_OBJECT, purpose); SocketAccess.doPrivilegedVoid(() -> client.abortMultipartUpload(request)); } catch (AmazonS3Exception e) { if (e.getStatusCode() != 404) { @@ -840,7 +842,7 @@ private void abortMultipartUploadIfExists(String uploadId) { private void completeMultipartUpload(String uploadId, PartETag partETag) { final var completeMultipartUploadRequest = new CompleteMultipartUploadRequest(bucket, blobKey, uploadId, List.of(partETag)); - completeMultipartUploadRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.PUT_MULTIPART_OBJECT, purpose)); + S3BlobStore.configureRequestForMetrics(completeMultipartUploadRequest, blobStore, Operation.PUT_MULTIPART_OBJECT, purpose); SocketAccess.doPrivilegedVoid(() -> client.completeMultipartUpload(completeMultipartUploadRequest)); } } @@ -875,7 +877,7 @@ public void compareAndExchangeRegister( public void getRegister(OperationPurpose purpose, String key, ActionListener listener) { 
ActionListener.completeWith(listener, () -> { final var getObjectRequest = new GetObjectRequest(blobStore.bucket(), buildKey(key)); - getObjectRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.GET_OBJECT, purpose)); + S3BlobStore.configureRequestForMetrics(getObjectRequest, blobStore, Operation.GET_OBJECT, purpose); try ( var clientReference = blobStore.clientReference(); var s3Object = SocketAccess.doPrivileged(() -> clientReference.client().getObject(getObjectRequest)); diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index 6b58026aba4f4..68def0598ef60 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -9,6 +9,7 @@ package org.elasticsearch.repositories.s3; import com.amazonaws.AmazonClientException; +import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.Request; import com.amazonaws.Response; import com.amazonaws.metrics.RequestMetricCollector; @@ -55,6 +56,8 @@ class S3BlobStore implements BlobStore { + public static final String CUSTOM_QUERY_PARAMETER_PURPOSE = "x-purpose"; + /** * Maximum number of deletes in a {@link DeleteObjectsRequest}. * @see S3 Documentation. @@ -341,9 +344,11 @@ private void deletePartition( } private static DeleteObjectsRequest bulkDelete(OperationPurpose purpose, S3BlobStore blobStore, List blobs) { - return new DeleteObjectsRequest(blobStore.bucket()).withKeys(blobs.toArray(Strings.EMPTY_ARRAY)) - .withQuiet(true) - .withRequestMetricCollector(blobStore.getMetricCollector(Operation.DELETE_OBJECTS, purpose)); + final DeleteObjectsRequest deleteObjectsRequest = new DeleteObjectsRequest(blobStore.bucket()).withKeys( + blobs.toArray(Strings.EMPTY_ARRAY) + ).withQuiet(true); + configureRequestForMetrics(deleteObjectsRequest, blobStore, Operation.DELETE_OBJECTS, purpose); + return deleteObjectsRequest; } @Override @@ -456,4 +461,14 @@ IgnoreNoResponseMetricsCollector buildMetricCollector(Operation operation, Opera return new IgnoreNoResponseMetricsCollector(operation, purpose); } } + + static void configureRequestForMetrics( + AmazonWebServiceRequest request, + S3BlobStore blobStore, + Operation operation, + OperationPurpose purpose + ) { + request.setRequestMetricCollector(blobStore.getMetricCollector(operation, purpose)); + request.putCustomQueryParameter(CUSTOM_QUERY_PARAMETER_PURPOSE, purpose.getKey()); + } } diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java index 0ebf6c54b49aa..c457b9d51e8b9 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java @@ -29,6 +29,7 @@ import java.util.List; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.repositories.s3.S3BlobStore.configureRequestForMetrics; /** * Wrapper around an S3 object that will retry the {@link GetObjectRequest} if the download fails part-way through, resuming from where @@ -86,7 +87,7 @@ private void openStreamWithRetry() throws IOException { while (true) { try (AmazonS3Reference clientReference = blobStore.clientReference()) { final 
GetObjectRequest getObjectRequest = new GetObjectRequest(blobStore.bucket(), blobKey); - getObjectRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.GET_OBJECT, purpose)); + configureRequestForMetrics(getObjectRequest, blobStore, Operation.GET_OBJECT, purpose); if (currentOffset > 0 || start > 0 || end < Long.MAX_VALUE - 1) { assert start + currentOffset <= end : "requesting beyond end, start = " + start + " offset=" + currentOffset + " end=" + end; diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index 04a836997e0f7..0ddd29171b3bd 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -7,6 +7,8 @@ */ package org.elasticsearch.repositories.s3; +import fixture.s3.S3HttpHandler; + import com.amazonaws.DnsResolver; import com.amazonaws.SdkClientException; import com.amazonaws.services.s3.AmazonS3ClientBuilder; @@ -230,7 +232,10 @@ public void testWriteBlobWithRetries() throws Exception { final byte[] bytes = randomBlobContent(); httpServer.createContext(downloadStorageEndpoint(blobContainer, "write_blob_max_retries"), exchange -> { - if ("PUT".equals(exchange.getRequestMethod()) && exchange.getRequestURI().getQuery() == null) { + final S3HttpHandler.RequestComponents requestComponents = S3HttpHandler.parseRequestComponents( + S3HttpHandler.getRawRequestString(exchange) + ); + if ("PUT".equals(requestComponents.method()) && requestComponents.query().isEmpty()) { if (countDown.countDown()) { final BytesReference body = Streams.readFully(exchange.getRequestBody()); if (Objects.deepEquals(bytes, BytesReference.toBytes(body))) { @@ -319,9 +324,12 @@ public void testWriteLargeBlob() throws Exception { final CountDown countDownComplete = new CountDown(nbErrors); httpServer.createContext(downloadStorageEndpoint(blobContainer, "write_large_blob"), exchange -> { + final S3HttpHandler.RequestComponents requestComponents = S3HttpHandler.parseRequestComponents( + S3HttpHandler.getRawRequestString(exchange) + ); final long contentLength = Long.parseLong(exchange.getRequestHeaders().getFirst("Content-Length")); - if ("POST".equals(exchange.getRequestMethod()) && exchange.getRequestURI().getQuery().equals("uploads")) { + if ("POST".equals(requestComponents.method()) && requestComponents.query().equals("uploads")) { // initiate multipart upload request if (countDownInitiate.countDown()) { byte[] response = (""" @@ -337,9 +345,9 @@ public void testWriteLargeBlob() throws Exception { exchange.close(); return; } - } else if ("PUT".equals(exchange.getRequestMethod()) - && exchange.getRequestURI().getQuery().contains("uploadId=TEST") - && exchange.getRequestURI().getQuery().contains("partNumber=")) { + } else if ("PUT".equals(requestComponents.method()) + && requestComponents.query().contains("uploadId=TEST") + && requestComponents.query().contains("partNumber=")) { // upload part request MD5DigestCalculatingInputStream md5 = new MD5DigestCalculatingInputStream(exchange.getRequestBody()); BytesReference bytes = Streams.readFully(md5); @@ -353,7 +361,7 @@ public void testWriteLargeBlob() throws Exception { return; } - } else if ("POST".equals(exchange.getRequestMethod()) && exchange.getRequestURI().getQuery().equals("uploadId=TEST")) { + } else if 
("POST".equals(requestComponents.method()) && requestComponents.query().equals("uploadId=TEST")) { // complete multipart upload request if (countDownComplete.countDown()) { Streams.readFully(exchange.getRequestBody()); @@ -418,9 +426,12 @@ public void testWriteLargeBlobStreaming() throws Exception { final CountDown countDownComplete = new CountDown(nbErrors); httpServer.createContext(downloadStorageEndpoint(blobContainer, "write_large_blob_streaming"), exchange -> { + final S3HttpHandler.RequestComponents requestComponents = S3HttpHandler.parseRequestComponents( + S3HttpHandler.getRawRequestString(exchange) + ); final long contentLength = Long.parseLong(exchange.getRequestHeaders().getFirst("Content-Length")); - if ("POST".equals(exchange.getRequestMethod()) && exchange.getRequestURI().getQuery().equals("uploads")) { + if ("POST".equals(requestComponents.method()) && requestComponents.query().equals("uploads")) { // initiate multipart upload request if (countDownInitiate.countDown()) { byte[] response = (""" @@ -436,9 +447,9 @@ public void testWriteLargeBlobStreaming() throws Exception { exchange.close(); return; } - } else if ("PUT".equals(exchange.getRequestMethod()) - && exchange.getRequestURI().getQuery().contains("uploadId=TEST") - && exchange.getRequestURI().getQuery().contains("partNumber=")) { + } else if ("PUT".equals(requestComponents.method()) + && requestComponents.query().contains("uploadId=TEST") + && requestComponents.query().contains("partNumber=")) { // upload part request MD5DigestCalculatingInputStream md5 = new MD5DigestCalculatingInputStream(exchange.getRequestBody()); BytesReference bytes = Streams.readFully(md5); @@ -451,7 +462,7 @@ public void testWriteLargeBlobStreaming() throws Exception { return; } - } else if ("POST".equals(exchange.getRequestMethod()) && exchange.getRequestURI().getQuery().equals("uploadId=TEST")) { + } else if ("POST".equals(requestComponents.method()) && requestComponents.query().equals("uploadId=TEST")) { // complete multipart upload request if (countDownComplete.countDown()) { Streams.readFully(exchange.getRequestBody()); diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java index 336b888dd7d3c..2698d96ab7ab9 100644 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java +++ b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java @@ -12,6 +12,7 @@ import com.sun.net.httpserver.HttpHandler; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.CompositeBytesReference; @@ -31,6 +32,7 @@ import java.io.PrintStream; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -74,14 +76,21 @@ public S3HttpHandler(final String bucket, @Nullable final String basePath) { @Override public void handle(final HttpExchange exchange) throws IOException { - final String request = exchange.getRequestMethod() + " " + exchange.getRequestURI().toString(); + // Remove custom query parameters before processing the request. This simulates how S3 ignores them. 
+ // https://docs.aws.amazon.com/AmazonS3/latest/userguide/LogFormat.html#LogFormatCustom + final RequestComponents requestComponents = parseRequestComponents( + exchange.getRequestMethod() + " " + exchange.getRequestURI().toString() + ); + final String request = requestComponents.request(); + onCustomQueryParameters(requestComponents.customQueryParameters); + if (request.startsWith("GET") || request.startsWith("HEAD") || request.startsWith("DELETE")) { int read = exchange.getRequestBody().read(); assert read == -1 : "Request body should have been empty but saw [" + read + "]"; } try { if (Regex.simpleMatch("HEAD /" + path + "/*", request)) { - final BytesReference blob = blobs.get(exchange.getRequestURI().getPath()); + final BytesReference blob = blobs.get(requestComponents.path); if (blob == null) { exchange.sendResponseHeaders(RestStatus.NOT_FOUND.getStatus(), -1); } else { @@ -89,8 +98,7 @@ public void handle(final HttpExchange exchange) throws IOException { } } else if (Regex.simpleMatch("GET /" + bucket + "/?uploads&prefix=*", request)) { final Map params = new HashMap<>(); - RestUtils.decodeQueryString(exchange.getRequestURI().getQuery(), 0, params); - + RestUtils.decodeQueryString(request, request.indexOf('?') + 1, params); final var prefix = params.get("prefix"); final var uploadsList = new StringBuilder(); @@ -120,10 +128,7 @@ public void handle(final HttpExchange exchange) throws IOException { exchange.getResponseBody().write(response); } else if (Regex.simpleMatch("POST /" + path + "/*?uploads", request)) { - final var upload = new MultipartUpload( - UUIDs.randomBase64UUID(), - exchange.getRequestURI().getPath().substring(bucket.length() + 2) - ); + final var upload = new MultipartUpload(UUIDs.randomBase64UUID(), requestComponents.path.substring(bucket.length() + 2)); uploads.put(upload.getUploadId(), upload); final var uploadResult = new StringBuilder(); @@ -141,7 +146,7 @@ public void handle(final HttpExchange exchange) throws IOException { } else if (Regex.simpleMatch("PUT /" + path + "/*?uploadId=*&partNumber=*", request)) { final Map params = new HashMap<>(); - RestUtils.decodeQueryString(exchange.getRequestURI().getQuery(), 0, params); + RestUtils.decodeQueryString(request, request.indexOf('?') + 1, params); final var upload = uploads.get(params.get("uploadId")); if (upload == null) { @@ -154,15 +159,14 @@ public void handle(final HttpExchange exchange) throws IOException { } } else if (Regex.simpleMatch("POST /" + path + "/*?uploadId=*", request)) { - final Map params = new HashMap<>(); - RestUtils.decodeQueryString(exchange.getRequestURI().getQuery(), 0, params); + RestUtils.decodeQueryString(request, request.indexOf('?') + 1, params); final var upload = uploads.remove(params.get("uploadId")); if (upload == null) { exchange.sendResponseHeaders(RestStatus.NOT_FOUND.getStatus(), -1); } else { final var blobContents = upload.complete(extractPartEtags(Streams.readFully(exchange.getRequestBody()))); - blobs.put(exchange.getRequestURI().getPath(), blobContents); + blobs.put(requestComponents.path, blobContents); byte[] response = ("\n" + "\n" @@ -170,7 +174,7 @@ public void handle(final HttpExchange exchange) throws IOException { + bucket + "\n" + "" - + exchange.getRequestURI().getPath() + + requestComponents.path + "\n" + "").getBytes(StandardCharsets.UTF_8); exchange.getResponseHeaders().add("Content-Type", "application/xml"); @@ -179,19 +183,19 @@ public void handle(final HttpExchange exchange) throws IOException { } } else if (Regex.simpleMatch("DELETE /" + path + 
"/*?uploadId=*", request)) { final Map params = new HashMap<>(); - RestUtils.decodeQueryString(exchange.getRequestURI().getQuery(), 0, params); + RestUtils.decodeQueryString(request, request.indexOf('?') + 1, params); final var upload = uploads.remove(params.get("uploadId")); exchange.sendResponseHeaders((upload == null ? RestStatus.NOT_FOUND : RestStatus.NO_CONTENT).getStatus(), -1); } else if (Regex.simpleMatch("PUT /" + path + "/*", request)) { final Tuple blob = parseRequestBody(exchange); - blobs.put(exchange.getRequestURI().toString(), blob.v2()); + blobs.put(requestComponents.uri(), blob.v2()); exchange.getResponseHeaders().add("ETag", blob.v1()); exchange.sendResponseHeaders(RestStatus.OK.getStatus(), -1); } else if (Regex.simpleMatch("GET /" + bucket + "/?prefix=*", request)) { final Map params = new HashMap<>(); - RestUtils.decodeQueryString(exchange.getRequestURI().getQuery(), 0, params); + RestUtils.decodeQueryString(request, request.indexOf('?') + 1, params); if (params.get("list-type") != null) { throw new AssertionError("Test must be adapted for GET Bucket (List Objects) Version 2"); } @@ -240,7 +244,7 @@ public void handle(final HttpExchange exchange) throws IOException { exchange.getResponseBody().write(response); } else if (Regex.simpleMatch("GET /" + path + "/*", request)) { - final BytesReference blob = blobs.get(exchange.getRequestURI().toString()); + final BytesReference blob = blobs.get(requestComponents.uri()); if (blob != null) { final String range = exchange.getRequestHeaders().getFirst("Range"); if (range == null) { @@ -271,7 +275,7 @@ public void handle(final HttpExchange exchange) throws IOException { int deletions = 0; for (Iterator> iterator = blobs.entrySet().iterator(); iterator.hasNext();) { Map.Entry blob = iterator.next(); - if (blob.getKey().startsWith(exchange.getRequestURI().toString())) { + if (blob.getKey().startsWith(requestComponents.uri())) { iterator.remove(); deletions++; } @@ -311,6 +315,42 @@ public Map blobs() { return blobs; } + protected void onCustomQueryParameters(final Map> params) {} + + public static RequestComponents parseRequestComponents(final String request) { + final int spacePos = request.indexOf(' '); + final String method = request.substring(0, spacePos); + final String uriString = request.substring(spacePos + 1); + final int questsionMarkPos = uriString.indexOf('?'); + // AWS s3 allows the same custom query parameter to be specified multiple times + final Map> customQueryParameters = new HashMap<>(); + if (questsionMarkPos == -1) { + return new RequestComponents(method, uriString, "", customQueryParameters); + } else { + final String queryString = uriString.substring(questsionMarkPos + 1); + final ArrayList queryParameters = new ArrayList<>(); + Arrays.stream(Strings.tokenizeToStringArray(queryString, "&")).forEach(param -> { + if (param.startsWith("x-")) { + final int equalPos = param.indexOf("="); + customQueryParameters.computeIfAbsent(param.substring(0, equalPos), k -> new ArrayList<>()) + .add(param.substring(equalPos + 1)); + } else { + queryParameters.add(param); + } + }); + return new RequestComponents( + method, + uriString.substring(0, questsionMarkPos), + Strings.collectionToDelimitedString(queryParameters, "&"), + customQueryParameters + ); + } + } + + public static String getRawRequestString(final HttpExchange exchange) { + return exchange.getRequestMethod() + " " + exchange.getRequestURI(); + } + private static final Pattern chunkSignaturePattern = Pattern.compile("^([0-9a-z]+);chunk-signature=([^\\r\\n]*)$"); 
private static Tuple parseRequestBody(final HttpExchange exchange) throws IOException { @@ -475,4 +515,19 @@ public static void sendError(final HttpExchange exchange, final RestStatus statu MultipartUpload getUpload(String uploadId) { return uploads.get(uploadId); } + + public record RequestComponents(String method, String path, String query, Map> customQueryParameters) { + + public String request() { + return method + " " + uri(); + } + + public String uri() { + if (query.isEmpty()) { + return path; + } else { + return path + "?" + query; + } + } + } } From e85bb5afc30b3eb16253591e59b2d724496d67d3 Mon Sep 17 00:00:00 2001 From: Kostas Krikellas <131142368+kkrik-es@users.noreply.github.com> Date: Mon, 5 Feb 2024 09:31:13 +0200 Subject: [PATCH 010/106] Nest pass-through objects within objects (#105062) * Fix test failure https://gradle-enterprise.elastic.co/s/icg66i6mwnjoi * Fix test failure https://gradle-enterprise.elastic.co/s/icg66i6mwnjoi * Nest pass-through objects within objects * Update docs/changelog/105062.yaml * improve test --- docs/changelog/105062.yaml | 5 + .../test/data_stream/150_tsdb.yml | 136 ++++++++++++--- .../index/mapper/ObjectMapper.java | 23 +-- .../index/mapper/PassThroughObjectMapper.java | 9 +- .../index/mapper/RootObjectMapper.java | 18 +- .../index/mapper/DynamicTemplatesTests.java | 2 +- .../index/mapper/ObjectMapperTests.java | 4 +- .../index/mapper/RootObjectMapperTests.java | 165 +++++++++++++++--- 8 files changed, 284 insertions(+), 78 deletions(-) create mode 100644 docs/changelog/105062.yaml diff --git a/docs/changelog/105062.yaml b/docs/changelog/105062.yaml new file mode 100644 index 0000000000000..928786f62381a --- /dev/null +++ b/docs/changelog/105062.yaml @@ -0,0 +1,5 @@ +pr: 105062 +summary: Nest pass-through objects within objects +area: TSDB +type: enhancement +issues: [] diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml index 794b27612fdd8..81ede2b045e61 100644 --- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/150_tsdb.yml @@ -381,12 +381,12 @@ dynamic templates - conflicting aliases: - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } --- -dynamic templates - subobject in passthrough object error: +dynamic templates with nesting: - skip: version: " - 8.12.99" + features: "default_shards" reason: "Support for dynamic fields was added in 8.13" - do: - catch: /Tried to add subobject \[subcategory\] to object \[attributes\] which does not support subobjects/ indices.put_index_template: name: my-dynamic-template body: @@ -395,21 +395,116 @@ dynamic templates - subobject in passthrough object error: template: settings: index: + number_of_shards: 1 mode: time_series + time_series: + start_time: 2023-08-31T13:03:08.138Z mappings: properties: attributes: type: passthrough + dynamic: true + time_series_dimension: true + resource: + type: object properties: - subcategory: - type: object - properties: - dim: - type: keyword + attributes: + type: passthrough + dynamic: true + time_series_dimension: true + dynamic_templates: + - counter_metric: + mapping: + type: integer + time_series_metric: counter - do: - catch: /Mapping definition for \[attributes\] has unsupported parameters:\ \[subobjects \:\ true\]/ + bulk: + index: k9s + refresh: true + body: + - '{ "create": { 
"dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:03:08.138Z","data": "10", "resource.attributes.dim": "A", "resource.attributes.another.dim": "C", "attributes.more.dim": "E" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:03:09.138Z","data": "20", "resource.attributes.dim": "A", "resource.attributes.another.dim": "C", "attributes.more.dim": "E" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:03:10.138Z","data": "30", "resource.attributes.dim": "B", "resource.attributes.another.dim": "D", "attributes.more.dim": "F" }' + - '{ "create": { "dynamic_templates": { "data": "counter_metric" } } }' + - '{ "@timestamp": "2023-09-01T13:03:10.238Z","data": "40", "resource.attributes.dim": "B", "resource.attributes.another.dim": "D", "attributes.more.dim": "F" }' + + - do: + search: + index: k9s + body: + size: 0 + + - match: { hits.total.value: 4 } + + - do: + search: + index: k9s + body: + size: 0 + aggs: + filterA: + filter: + term: + dim: A + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } + - match: { aggregations.filterA.tsids.buckets.0.key: "LEjiJ4ATCXWlzeFvhGQ9lYlnP-nRIGKYihfZ18WoJ94t9a8OpbsCdwZALomb" } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + + - do: + search: + index: k9s + body: + size: 0 + aggs: + filterA: + filter: + term: + another.dim: C + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } + - match: { aggregations.filterA.tsids.buckets.0.key: "LEjiJ4ATCXWlzeFvhGQ9lYlnP-nRIGKYihfZ18WoJ94t9a8OpbsCdwZALomb" } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + + - do: + search: + index: k9s + body: + size: 0 + aggs: + filterA: + filter: + term: + more.dim: E + aggs: + tsids: + terms: + field: _tsid + + - length: { aggregations.filterA.tsids.buckets: 1 } + - match: { aggregations.filterA.tsids.buckets.0.key: "LEjiJ4ATCXWlzeFvhGQ9lYlnP-nRIGKYihfZ18WoJ94t9a8OpbsCdwZALomb" } + - match: { aggregations.filterA.tsids.buckets.0.doc_count: 2 } + +--- +dynamic templates - subobject in passthrough object error: + - skip: + version: " - 8.12.99" + reason: "Support for dynamic fields was added in 8.13" + - do: + catch: /Tried to add subobject \[subcategory\] to object \[attributes\] which does not support subobjects/ indices.put_index_template: name: my-dynamic-template body: @@ -418,24 +513,21 @@ dynamic templates - subobject in passthrough object error: template: settings: index: - number_of_shards: 1 mode: time_series - time_series: - start_time: 2023-08-31T13:03:08.138Z mappings: properties: attributes: type: passthrough - subobjects: true + properties: + subcategory: + type: object + properties: + dim: + type: keyword ---- -dynamic templates - passthrough not under root error: - - skip: - version: " - 8.12.99" - reason: "Support for dynamic fields was added in 8.13" - do: - catch: /Tried to add passthrough subobject \[attributes\] to object \[resource\], passthrough is not supported as a subobject/ + catch: /Mapping definition for \[attributes\] has unsupported parameters:\ \[subobjects \:\ true\]/ indices.put_index_template: name: my-dynamic-template body: @@ -444,11 +536,13 @@ dynamic templates - passthrough not under root error: template: settings: index: + number_of_shards: 1 mode: time_series + time_series: + start_time: 2023-08-31T13:03:08.138Z mappings: properties: - "resource.attributes": + 
attributes: type: passthrough - dynamic: true - time_series_dimension: true + subobjects: true diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 1ed9713d73e75..9d7353859ed25 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -295,7 +295,10 @@ protected static void parseProperties( } } - if (objBuilder.subobjects.value() == false && type.equals(ObjectMapper.CONTENT_TYPE)) { + if (objBuilder.subobjects.value() == false + && (type.equals(ObjectMapper.CONTENT_TYPE) + || type.equals(NestedObjectMapper.CONTENT_TYPE) + || type.equals(PassThroughObjectMapper.CONTENT_TYPE))) { throw new MapperParsingException( "Tried to add subobject [" + fieldName @@ -304,24 +307,6 @@ protected static void parseProperties( + "] which does not support subobjects" ); } - if (objBuilder.subobjects.value() == false && type.equals(NestedObjectMapper.CONTENT_TYPE)) { - throw new MapperParsingException( - "Tried to add nested object [" - + fieldName - + "] to object [" - + objBuilder.name() - + "] which does not support subobjects" - ); - } - if (type.equals(PassThroughObjectMapper.CONTENT_TYPE) && objBuilder instanceof RootObjectMapper.Builder == false) { - throw new MapperParsingException( - "Tried to add passthrough subobject [" - + fieldName - + "] to object [" - + objBuilder.name() - + "], passthrough is not supported as a subobject" - ); - } Mapper.TypeParser typeParser = parserContext.typeParser(type); if (typeParser == null) { throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + fieldName + "]"); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java index 7688b217ab7fc..b49c9328fcc79 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/PassThroughObjectMapper.java @@ -57,6 +57,7 @@ public PassThroughObjectMapper.Builder setContainsDimensions() { public PassThroughObjectMapper build(MapperBuilderContext context) { return new PassThroughObjectMapper( name, + context.buildFullName(name), enabled, dynamic, buildMappers(context.createChildContext(name)), @@ -70,19 +71,20 @@ public PassThroughObjectMapper build(MapperBuilderContext context) { PassThroughObjectMapper( String name, + String fullPath, Explicit enabled, Dynamic dynamic, Map mappers, Explicit timeSeriesDimensionSubFields ) { // Subobjects are not currently supported. 
- super(name, name, enabled, Explicit.IMPLICIT_FALSE, dynamic, mappers); + super(name, fullPath, enabled, Explicit.IMPLICIT_FALSE, dynamic, mappers); this.timeSeriesDimensionSubFields = timeSeriesDimensionSubFields; } @Override PassThroughObjectMapper withoutMappers() { - return new PassThroughObjectMapper(simpleName(), enabled, dynamic, Map.of(), timeSeriesDimensionSubFields); + return new PassThroughObjectMapper(simpleName(), fullPath(), enabled, dynamic, Map.of(), timeSeriesDimensionSubFields); } public boolean containsDimensions() { @@ -91,7 +93,7 @@ public boolean containsDimensions() { @Override public PassThroughObjectMapper.Builder newBuilder(IndexVersion indexVersionCreated) { - PassThroughObjectMapper.Builder builder = new PassThroughObjectMapper.Builder(name()); + PassThroughObjectMapper.Builder builder = new PassThroughObjectMapper.Builder(simpleName()); builder.enabled = enabled; builder.dynamic = dynamic; builder.timeSeriesDimensionSubFields = timeSeriesDimensionSubFields; @@ -108,6 +110,7 @@ public PassThroughObjectMapper merge(ObjectMapper mergeWith, MergeReason reason, return new PassThroughObjectMapper( simpleName(), + fullPath(), mergeResult.enabled(), mergeResult.dynamic(), mergeResult.mappers(), diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index 86bdb2aa2bba7..7994c018f40f2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -45,6 +45,7 @@ public class RootObjectMapper extends ObjectMapper { private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(RootObjectMapper.class); + private static final int MAX_NESTING_LEVEL_FOR_PASS_THROUGH_OBJECTS = 20; /** * Parameter used when serializing {@link RootObjectMapper} and request that the runtime section is skipped. @@ -111,7 +112,11 @@ public RootObjectMapper.Builder addRuntimeFields(Map runti @Override public RootObjectMapper build(MapperBuilderContext context) { Map mappers = buildMappers(context); - mappers.putAll(getAliasMappers(mappers, context)); + + Map aliasMappers = new HashMap<>(); + getAliasMappers(mappers, aliasMappers, context, 0); + mappers.putAll(aliasMappers); + return new RootObjectMapper( name, enabled, @@ -126,8 +131,11 @@ public RootObjectMapper build(MapperBuilderContext context) { ); } - Map getAliasMappers(Map mappers, MapperBuilderContext context) { - Map aliasMappers = new HashMap<>(); + void getAliasMappers(Map mappers, Map aliasMappers, MapperBuilderContext context, int level) { + if (level >= MAX_NESTING_LEVEL_FOR_PASS_THROUGH_OBJECTS) { + logger.warn("Exceeded maximum nesting level for searching for pass-through object fields within object fields."); + return; + } for (Mapper mapper : mappers.values()) { // Create aliases for all fields in child passthrough mappers and place them under the root object. if (mapper instanceof PassThroughObjectMapper passthroughMapper) { @@ -154,9 +162,11 @@ Map getAliasMappers(Map mappers, MapperBuilderCo } } } + } else if (mapper instanceof ObjectMapper objectMapper) { + // Call recursively to check child fields. The level guards against long recursive call sequences. 
+ getAliasMappers(objectMapper.mappers, aliasMappers, context, level + 1); } } - return aliasMappers; } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index 54db5832c2726..10bd6c667c26e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -1391,7 +1391,7 @@ public void testSubobjectsFalseWithInnerNestedFromDynamicTemplate() { ); assertThat(exception.getRootCause(), instanceOf(MapperParsingException.class)); assertEquals( - "Tried to add nested object [time] to object [__dynamic__test] which does not support subobjects", + "Tried to add subobject [time] to object [__dynamic__test] which does not support subobjects", exception.getRootCause().getMessage() ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 6e958ddbea904..cbb0929b813fc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -407,7 +407,7 @@ public void testSubobjectsFalseWithInnerNested() { b.endObject(); }))); assertEquals( - "Failed to parse mapping: Tried to add nested object [time] to object [service] which does not support subobjects", + "Failed to parse mapping: Tried to add subobject [time] to object [service] which does not support subobjects", exception.getMessage() ); } @@ -457,7 +457,7 @@ public void testSubobjectsFalseRootWithInnerNested() { b.endObject(); }))); assertEquals( - "Failed to parse mapping: Tried to add nested object [metrics.service] to object [_doc] which does not support subobjects", + "Failed to parse mapping: Tried to add subobject [metrics.service] to object [_doc] which does not support subobjects", exception.getMessage() ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java index b77019806fc4f..662a809e6d065 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java @@ -360,50 +360,159 @@ public void testPassThroughObjectWithAliases() throws IOException { assertThat(mapperService.mappingLookup().getMapper("labels.dim"), instanceOf(KeywordFieldMapper.class)); } + public void testPassThroughObjectNested() throws IOException { + MapperService mapperService = createMapperService(mapping(b -> { + b.startObject("resource").field("type", "object"); + { + b.startObject("properties"); + { + b.startObject("attributes").field("type", "passthrough"); + { + b.startObject("properties"); + b.startObject("dim").field("type", "keyword").endObject(); + b.endObject(); + } + b.endObject(); + } + b.endObject(); + } + b.endObject(); + b.startObject("attributes").field("type", "passthrough"); + { + b.startObject("properties"); + b.startObject("another.dim").field("type", "keyword").endObject(); + b.endObject(); + } + b.endObject(); + })); + assertThat(mapperService.mappingLookup().getMapper("dim"), instanceOf(FieldAliasMapper.class)); + assertThat(mapperService.mappingLookup().getMapper("resource.attributes.dim"), instanceOf(KeywordFieldMapper.class)); + 
assertThat(mapperService.mappingLookup().getMapper("another.dim"), instanceOf(FieldAliasMapper.class)); + assertThat(mapperService.mappingLookup().getMapper("attributes.another.dim"), instanceOf(KeywordFieldMapper.class)); + } + public void testAliasMappersCreatesAlias() throws Exception { var context = MapperBuilderContext.root(false, false); - - Map fields = new HashMap<>(); - fields.put("host", new KeywordFieldMapper.Builder("host", IndexVersion.current()).build(context)); - - Map mappers = new HashMap<>(); - mappers.put( - "labels", - new PassThroughObjectMapper("labels", Explicit.EXPLICIT_TRUE, ObjectMapper.Dynamic.FALSE, fields, Explicit.EXPLICIT_FALSE) + Map aliases = new HashMap<>(); + new RootObjectMapper.Builder("root", Explicit.EXPLICIT_FALSE).getAliasMappers( + Map.of( + "labels", + new PassThroughObjectMapper( + "labels", + "labels", + Explicit.EXPLICIT_TRUE, + ObjectMapper.Dynamic.FALSE, + Map.of("host", new KeywordFieldMapper.Builder("host", IndexVersion.current()).build(context)), + Explicit.EXPLICIT_FALSE + ) + ), + aliases, + context, + 0 ); - - Map aliases = new RootObjectMapper.Builder("root", Explicit.EXPLICIT_FALSE).getAliasMappers(mappers, context); assertEquals(1, aliases.size()); assertThat(aliases.get("host"), instanceOf(FieldAliasMapper.class)); } - public void testAliasMappersCreatesNoAliasForRegularObject() throws Exception { + public void testAliasMappersCreatesAliasNested() throws Exception { var context = MapperBuilderContext.root(false, false); + Map aliases = new HashMap<>(); + new RootObjectMapper.Builder("root", Explicit.EXPLICIT_FALSE).getAliasMappers( + Map.of( + "outer", + new ObjectMapper( + "outer", + "outer", + Explicit.EXPLICIT_TRUE, + Explicit.EXPLICIT_TRUE, + ObjectMapper.Dynamic.FALSE, + Map.of( + "inner", + new PassThroughObjectMapper( + "inner", + "outer.inner", + Explicit.EXPLICIT_TRUE, + ObjectMapper.Dynamic.FALSE, + Map.of("host", new KeywordFieldMapper.Builder("host", IndexVersion.current()).build(context)), + Explicit.EXPLICIT_FALSE + ) + ) + ) + ), + aliases, + context, + 0 + ); + assertEquals(1, aliases.size()); + assertThat(aliases.get("host"), instanceOf(FieldAliasMapper.class)); + } - Map fields = new HashMap<>(); - fields.put("host", new KeywordFieldMapper.Builder("host", IndexVersion.current()).build(context)); + public void testAliasMappersExitsInDeepNesting() throws Exception { + var context = MapperBuilderContext.root(false, false); + Map aliases = new HashMap<>(); + new RootObjectMapper.Builder("root", Explicit.EXPLICIT_FALSE).getAliasMappers( + Map.of( + "labels", + new PassThroughObjectMapper( + "labels", + "labels", + Explicit.EXPLICIT_TRUE, + ObjectMapper.Dynamic.FALSE, + Map.of("host", new KeywordFieldMapper.Builder("host", IndexVersion.current()).build(context)), + Explicit.EXPLICIT_FALSE + ) + ), + aliases, + context, + 1_000_000 + ); + assertTrue(aliases.isEmpty()); + } - Map mappers = new HashMap<>(); - mappers.put( - "labels", - new ObjectMapper("labels", "labels", Explicit.EXPLICIT_TRUE, Explicit.EXPLICIT_FALSE, ObjectMapper.Dynamic.FALSE, fields) + public void testAliasMappersCreatesNoAliasForRegularObject() throws Exception { + var context = MapperBuilderContext.root(false, false); + Map aliases = new HashMap<>(); + new RootObjectMapper.Builder("root", Explicit.EXPLICIT_FALSE).getAliasMappers( + Map.of( + "labels", + new ObjectMapper( + "labels", + "labels", + Explicit.EXPLICIT_TRUE, + Explicit.EXPLICIT_FALSE, + ObjectMapper.Dynamic.FALSE, + Map.of("host", new KeywordFieldMapper.Builder("host", 
IndexVersion.current()).build(context)) + ) + ), + aliases, + context, + 0 ); - assertTrue(new RootObjectMapper.Builder("root", Explicit.EXPLICIT_FALSE).getAliasMappers(mappers, context).isEmpty()); + assertTrue(aliases.isEmpty()); } public void testAliasMappersConflictingField() throws Exception { var context = MapperBuilderContext.root(false, false); - - Map fields = new HashMap<>(); - fields.put("host", new KeywordFieldMapper.Builder("host", IndexVersion.current()).build(context)); - - Map mappers = new HashMap<>(); - mappers.put( - "labels", - new PassThroughObjectMapper("labels", Explicit.EXPLICIT_TRUE, ObjectMapper.Dynamic.FALSE, fields, Explicit.EXPLICIT_FALSE) + Map aliases = new HashMap<>(); + new RootObjectMapper.Builder("root", Explicit.EXPLICIT_FALSE).getAliasMappers( + Map.of( + "labels", + new PassThroughObjectMapper( + "labels", + "labels", + Explicit.EXPLICIT_TRUE, + ObjectMapper.Dynamic.FALSE, + Map.of("host", new KeywordFieldMapper.Builder("host", IndexVersion.current()).build(context)), + Explicit.EXPLICIT_FALSE + ), + "host", + new KeywordFieldMapper.Builder("host", IndexVersion.current()).build(context) + ), + aliases, + context, + 0 ); - mappers.put("host", new KeywordFieldMapper.Builder("host", IndexVersion.current()).build(context)); - assertTrue(new RootObjectMapper.Builder("root", Explicit.EXPLICIT_FALSE).getAliasMappers(mappers, context).isEmpty()); + assertTrue(aliases.isEmpty()); } public void testEmptyType() throws Exception { From b328982e37358122d765360119e1399e56bdcd45 Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Mon, 5 Feb 2024 10:08:12 +0100 Subject: [PATCH 011/106] Refactor DataExtractor summary (#105011) * Empty getDataSummary methods * Move chunking logic from DataSummary to Chunker. * Move DataSummary to ScrollDataExtractor. * Move DataSummary to AbstractAggregationDataExtractor. * Remove unused code * Make ChunkedDataExtractorContext a record * Remove more unused code * Add tests for getSummary() * Implement CompositeAggregationDataExtractor::getSummary(). * More unused code * Move shared code to DataExtractorQueryContext. * Move shared code to DataExtractorUtils. * Lint fixes * Move checkForSkippedClusters to DataExtractorUtils * Replace monkey patching by ArgumentCaptor.
* Add checkForSkippedClusters to AbstractAggregationDataExtractor::executeSearchRequest * Fix DataSummary for rollups * Add documentation --- .../ml/datafeed/extractor/DataExtractor.java | 28 +- .../extractor/DataExtractorFactory.java | 3 +- .../extractor/DataExtractorQueryContext.java | 47 ++ .../extractor/DataExtractorUtils.java | 71 +++- .../AbstractAggregationDataExtractor.java | 68 ++- .../aggregation/AggregationDataExtractor.java | 6 +- .../AggregationDataExtractorContext.java | 31 +- .../CompositeAggregationDataExtractor.java | 52 ++- ...positeAggregationDataExtractorContext.java | 28 +- .../aggregation/RollupDataExtractor.java | 6 +- .../chunked/ChunkedDataExtractor.java | 299 ++----------- .../chunked/ChunkedDataExtractorContext.java | 66 +-- .../chunked/ChunkedDataExtractorFactory.java | 30 +- .../extractor/scroll/ScrollDataExtractor.java | 50 ++- .../scroll/ScrollDataExtractorContext.java | 28 +- .../AggregationDataExtractorFactoryTests.java | 8 +- .../AggregationDataExtractorTests.java | 173 +++++--- ...ompositeAggregationDataExtractorTests.java | 174 +++++--- .../ChunkedDataExtractorFactoryTests.java | 19 +- .../chunked/ChunkedDataExtractorTests.java | 401 ++++++------------ .../scroll/ScrollDataExtractorTests.java | 47 ++ 21 files changed, 767 insertions(+), 868 deletions(-) create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorQueryContext.java diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractor.java index 991916333f4cf..0ea1914d3e14b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractor.java @@ -6,8 +6,6 @@ */ package org.elasticsearch.xpack.ml.datafeed.extractor; -import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.xpack.core.ml.datafeed.SearchInterval; import java.io.IOException; @@ -18,6 +16,14 @@ public interface DataExtractor { record Result(SearchInterval searchInterval, Optional data) {} + record DataSummary(Long earliestTime, Long latestTime, long totalHits) { + public boolean hasData() { + return earliestTime != null; + } + } + + DataSummary getSummary(); + /** * @return {@code true} if the search has not finished yet, or {@code false} otherwise */ @@ -50,22 +56,4 @@ record Result(SearchInterval searchInterval, Optional data) {} * @return the end time to which this extractor will search */ long getEndTime(); - - /** - * Check whether the search skipped CCS clusters. - * @throws ResourceNotFoundException if any CCS clusters were skipped, as this could - * cause anomalies to be spuriously detected. - * @param searchResponse The search response to check for skipped CCS clusters. 
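The new getSummary() contract above replaces per-implementation summary searches with a single record on the interface. A minimal sketch of how a caller might consume it; the factory, time bounds, job id, and logger names here are illustrative, not from the patch:

    // Ask an extractor for a summary of the configured time range (sketch only).
    DataExtractor extractor = dataExtractorFactory.newExtractor(startMs, endMs);
    DataExtractor.DataSummary summary = extractor.getSummary();
    if (summary.hasData()) { // hasData() is defined as earliestTime != null
        long spreadMs = summary.latestTime() - summary.earliestTime();
        logger.debug("[{}] summary: hits={}, spread={}ms", jobId, summary.totalHits(), spreadMs);
    }
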
- */ - default void checkForSkippedClusters(SearchResponse searchResponse) { - SearchResponse.Clusters clusterResponse = searchResponse.getClusters(); - if (clusterResponse != null && clusterResponse.getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED) > 0) { - throw new ResourceNotFoundException( - "[{}] remote clusters out of [{}] were skipped when performing datafeed search", - clusterResponse.getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED), - clusterResponse.getTotal() - ); - } - } - } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java index 3175891aa4d6e..be2c8dd871a9b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactory.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.ml.datafeed.extractor.scroll.ScrollDataExtractorFactory; public interface DataExtractorFactory { + DataExtractor newExtractor(long start, long end); /** @@ -61,7 +62,7 @@ static void create( ActionListener factoryHandler = ActionListener.wrap( factory -> listener.onResponse( datafeed.getChunkingConfig().isEnabled() - ? new ChunkedDataExtractorFactory(client, datafeed, extraFilters, job, xContentRegistry, factory, timingStatsReporter) + ? new ChunkedDataExtractorFactory(datafeed, job, xContentRegistry, factory) : factory ), listener::onFailure diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorQueryContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorQueryContext.java new file mode 100644 index 0000000000000..8ba901f82d351 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorQueryContext.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.datafeed.extractor; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.index.query.QueryBuilder; + +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class DataExtractorQueryContext { + + public final String[] indices; + public final QueryBuilder query; + public final String timeField; + public final long start; + public final long end; + public final Map headers; + public final IndicesOptions indicesOptions; + public final Map runtimeMappings; + + public DataExtractorQueryContext( + List indices, + QueryBuilder query, + String timeField, + long start, + long end, + Map headers, + IndicesOptions indicesOptions, + Map runtimeMappings + ) { + this.indices = indices.toArray(new String[0]); + this.query = Objects.requireNonNull(query); + this.timeField = timeField; + this.start = start; + this.end = end; + this.headers = headers; + this.indicesOptions = Objects.requireNonNull(indicesOptions); + this.runtimeMappings = Objects.requireNonNull(runtimeMappings); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java index 0f6ae6f90fb52..f0e03a1e94973 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java @@ -7,9 +7,18 @@ package org.elasticsearch.xpack.ml.datafeed.extractor; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.Min; +import org.elasticsearch.search.builder.SearchSourceBuilder; /** * Utility methods for various DataExtractor implementations. @@ -17,12 +26,70 @@ public final class DataExtractorUtils { private static final String EPOCH_MILLIS = "epoch_millis"; + private static final String EARLIEST_TIME = "earliest_time"; + private static final String LATEST_TIME = "latest_time"; + + private DataExtractorUtils() {} /** * Combines a user query with a time range query. 
*/ - public static QueryBuilder wrapInTimeRangeQuery(QueryBuilder userQuery, String timeField, long start, long end) { + public static QueryBuilder wrapInTimeRangeQuery(QueryBuilder query, String timeField, long start, long end) { QueryBuilder timeQuery = new RangeQueryBuilder(timeField).gte(start).lt(end).format(EPOCH_MILLIS); - return new BoolQueryBuilder().filter(userQuery).filter(timeQuery); + return new BoolQueryBuilder().filter(query).filter(timeQuery); + } + + public static SearchRequestBuilder getSearchRequestBuilderForSummary(Client client, DataExtractorQueryContext context) { + return new SearchRequestBuilder(client).setIndices(context.indices) + .setIndicesOptions(context.indicesOptions) + .setSource(getSearchSourceBuilderForSummary(context)) + .setAllowPartialSearchResults(false) + .setTrackTotalHits(true); + } + + public static SearchSourceBuilder getSearchSourceBuilderForSummary(DataExtractorQueryContext context) { + return new SearchSourceBuilder().size(0) + .query(DataExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, context.start, context.end)) + .runtimeMappings(context.runtimeMappings) + .aggregation(AggregationBuilders.min(EARLIEST_TIME).field(context.timeField)) + .aggregation(AggregationBuilders.max(LATEST_TIME).field(context.timeField)); + } + + public static DataExtractor.DataSummary getDataSummary(SearchResponse searchResponse) { + InternalAggregations aggregations = searchResponse.getAggregations(); + if (aggregations == null) { + return new DataExtractor.DataSummary(null, null, 0L); + } else { + Long earliestTime = toLongIfFinite((aggregations.get(EARLIEST_TIME)).value()); + Long latestTime = toLongIfFinite((aggregations.get(LATEST_TIME)).value()); + long totalHits = searchResponse.getHits().getTotalHits().value; + return new DataExtractor.DataSummary(earliestTime, latestTime, totalHits); + } + } + + /** + * The min and max aggregations return infinity when there is no data. To ensure consistency + * between the different types of data summary we represent no data by earliest and latest times + * being null. Hence, this method converts infinite values to null. + */ + private static Long toLongIfFinite(double x) { + return Double.isFinite(x) ? (long) x : null; + } + + /** + * Check whether the search skipped CCS clusters. + * @throws ResourceNotFoundException if any CCS clusters were skipped, as this could + * cause anomalies to be spuriously detected. + * @param searchResponse The search response to check for skipped CCS clusters. 
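Taken together, the helpers above build a zero-size summary search: the user query wrapped in a time-range filter, plus min/max aggregations on the time field. A rough stand-alone equivalent as a sketch, with the field name and bounds chosen purely for illustration:

    QueryBuilder userQuery = QueryBuilders.matchAllQuery(); // stands in for the datafeed query
    QueryBuilder wrapped = new BoolQueryBuilder()
        .filter(userQuery)
        .filter(new RangeQueryBuilder("timestamp").gte(1000L).lt(5000L).format("epoch_millis"));
    SearchSourceBuilder summarySource = new SearchSourceBuilder().size(0) // aggs only, no hits
        .query(wrapped)
        .aggregation(AggregationBuilders.min("earliest_time").field("timestamp"))
        .aggregation(AggregationBuilders.max("latest_time").field("timestamp"));
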
+ */ + public static void checkForSkippedClusters(SearchResponse searchResponse) { + SearchResponse.Clusters clusterResponse = searchResponse.getClusters(); + if (clusterResponse != null && clusterResponse.getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED) > 0) { + throw new ResourceNotFoundException( + "[{}] remote clusters out of [{}] were skipped when performing datafeed search", + clusterResponse.getClusterStateCount(SearchResponse.Cluster.Status.SKIPPED), + clusterResponse.getTotal() + ); + } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java index 4cd5379d8fe3b..26c43e1d098c1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AbstractAggregationDataExtractor.java @@ -34,10 +34,8 @@ /** * Abstract class for aggregated data extractors, e.g. {@link RollupDataExtractor} - * - * @param The request builder type for getting data from ElasticSearch */ -abstract class AbstractAggregationDataExtractor> implements DataExtractor { +abstract class AbstractAggregationDataExtractor implements DataExtractor { private static final Logger LOGGER = LogManager.getLogger(AbstractAggregationDataExtractor.class); @@ -86,7 +84,7 @@ public void destroy() { @Override public long getEndTime() { - return context.end; + return context.queryContext.end; } @Override @@ -95,7 +93,7 @@ public Result next() throws IOException { throw new NoSuchElementException(); } - SearchInterval searchInterval = new SearchInterval(context.start, context.end); + SearchInterval searchInterval = new SearchInterval(context.queryContext.start, context.queryContext.end); if (aggregationToJsonProcessor == null) { InternalAggregations aggs = search(); if (aggs == null) { @@ -121,11 +119,10 @@ public Result next() throws IOException { private InternalAggregations search() { LOGGER.debug("[{}] Executing aggregated search", context.jobId); - T searchRequest = buildSearchRequest(buildBaseSearchSource()); + ActionRequestBuilder searchRequest = buildSearchRequest(buildBaseSearchSource()); assert searchRequest.request().allowPartialSearchResults() == false; SearchResponse searchResponse = executeSearchRequest(searchRequest); try { - checkForSkippedClusters(searchResponse); LOGGER.debug("[{}] Search response was obtained", context.jobId); timingStatsReporter.reportSearchDuration(searchResponse.getTook()); return validateAggs(searchResponse.getAggregations()); @@ -136,37 +133,62 @@ private InternalAggregations search() { private void initAggregationProcessor(InternalAggregations aggs) throws IOException { aggregationToJsonProcessor = new AggregationToJsonProcessor( - context.timeField, + context.queryContext.timeField, context.fields, context.includeDocCount, - context.start, + context.queryContext.start, null ); aggregationToJsonProcessor.process(aggs); } - protected SearchResponse executeSearchRequest(T searchRequestBuilder) { - return ClientHelper.executeWithHeaders(context.headers, ClientHelper.ML_ORIGIN, client, searchRequestBuilder::get); + private SearchResponse executeSearchRequest(ActionRequestBuilder searchRequestBuilder) { + SearchResponse searchResponse = ClientHelper.executeWithHeaders( + context.queryContext.headers, + 
ClientHelper.ML_ORIGIN, + client, + searchRequestBuilder::get + ); + boolean success = false; + try { + DataExtractorUtils.checkForSkippedClusters(searchResponse); + success = true; + } finally { + if (success == false) { + searchResponse.decRef(); + } + } + return searchResponse; } private SearchSourceBuilder buildBaseSearchSource() { // For derivative aggregations the first bucket will always be null // so query one extra histogram bucket back and hope there is data // in that bucket - long histogramSearchStartTime = Math.max(0, context.start - DatafeedConfigUtils.getHistogramIntervalMillis(context.aggs)); + long histogramSearchStartTime = Math.max( + 0, + context.queryContext.start - DatafeedConfigUtils.getHistogramIntervalMillis(context.aggs) + ); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(0) - .query(DataExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, histogramSearchStartTime, context.end)); + .query( + DataExtractorUtils.wrapInTimeRangeQuery( + context.queryContext.query, + context.queryContext.timeField, + histogramSearchStartTime, + context.queryContext.end + ) + ); - if (context.runtimeMappings.isEmpty() == false) { - searchSourceBuilder.runtimeMappings(context.runtimeMappings); + if (context.queryContext.runtimeMappings.isEmpty() == false) { + searchSourceBuilder.runtimeMappings(context.queryContext.runtimeMappings); } context.aggs.getAggregatorFactories().forEach(searchSourceBuilder::aggregation); context.aggs.getPipelineAggregatorFactories().forEach(searchSourceBuilder::aggregation); return searchSourceBuilder; } - protected abstract T buildSearchRequest(SearchSourceBuilder searchRequestBuilder); + protected abstract ActionRequestBuilder buildSearchRequest(SearchSourceBuilder searchRequestBuilder); private static InternalAggregations validateAggs(@Nullable InternalAggregations aggs) { if (aggs == null) { @@ -189,4 +211,18 @@ public AggregationDataExtractorContext getContext() { return context; } + @Override + public DataSummary getSummary() { + ActionRequestBuilder searchRequestBuilder = buildSearchRequest( + DataExtractorUtils.getSearchSourceBuilderForSummary(context.queryContext) + ); + SearchResponse searchResponse = executeSearchRequest(searchRequestBuilder); + try { + LOGGER.debug("[{}] Aggregating Data summary response was obtained", context.jobId); + timingStatsReporter.reportSearchDuration(searchResponse.getTook()); + return DataExtractorUtils.getDataSummary(searchResponse); + } finally { + searchResponse.decRef(); + } + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java index 34ea3a1fad04e..0a41c4387634e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractor.java @@ -17,7 +17,7 @@ * stored and they are then processed in batches. Cancellation is supported between batches. * Note that this class is NOT thread-safe. 
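The success-flag idiom in executeSearchRequest above is worth spelling out, since SearchResponse is ref-counted: the response is released only on the failure path, so a successful return still hands the caller a live reference. Condensed into a sketch, with a hypothetical runSearch() helper standing in for the client call:

    SearchResponse response = runSearch(); // assumed helper returning a ref-counted response
    boolean success = false;
    try {
        DataExtractorUtils.checkForSkippedClusters(response); // may throw ResourceNotFoundException
        success = true;
    } finally {
        if (success == false) {
            response.decRef(); // release on failure; on success the caller decRef()s after use
        }
    }
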
*/ -class AggregationDataExtractor extends AbstractAggregationDataExtractor { +class AggregationDataExtractor extends AbstractAggregationDataExtractor { AggregationDataExtractor( Client client, @@ -30,8 +30,8 @@ class AggregationDataExtractor extends AbstractAggregationDataExtractor fields; - final String[] indices; - final QueryBuilder query; final AggregatorFactories.Builder aggs; - final long start; - final long end; final boolean includeDocCount; - final Map headers; - final IndicesOptions indicesOptions; - final Map runtimeMappings; + final DataExtractorQueryContext queryContext; AggregationDataExtractorContext( String jobId, @@ -44,17 +37,19 @@ class AggregationDataExtractorContext { IndicesOptions indicesOptions, Map runtimeMappings ) { - this.jobId = Objects.requireNonNull(jobId); - this.timeField = Objects.requireNonNull(timeField); + this.jobId = jobId; this.fields = Objects.requireNonNull(fields); - this.indices = indices.toArray(new String[0]); - this.query = Objects.requireNonNull(query); this.aggs = Objects.requireNonNull(aggs); - this.start = start; - this.end = end; this.includeDocCount = includeDocCount; - this.headers = headers; - this.indicesOptions = Objects.requireNonNull(indicesOptions); - this.runtimeMappings = Objects.requireNonNull(runtimeMappings); + this.queryContext = new DataExtractorQueryContext( + indices, + query, + Objects.requireNonNull(timeField), + start, + end, + headers, + indicesOptions, + runtimeMappings + ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java index 0dfdd9897737e..e4712d051ef1e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractor.java @@ -48,6 +48,9 @@ class CompositeAggregationDataExtractor implements DataExtractor { private static final Logger LOGGER = LogManager.getLogger(CompositeAggregationDataExtractor.class); + private static final String EARLIEST_TIME = "earliest_time"; + private static final String LATEST_TIME = "latest_time"; + private volatile Map afterKey = null; private final CompositeAggregationBuilder compositeAggregationBuilder; private final Client client; @@ -98,7 +101,7 @@ public void destroy() { @Override public long getEndTime() { - return context.end; + return context.queryContext.end; } @Override @@ -107,7 +110,7 @@ public Result next() throws IOException { throw new NoSuchElementException(); } - SearchInterval searchInterval = new SearchInterval(context.start, context.end); + SearchInterval searchInterval = new SearchInterval(context.queryContext.start, context.queryContext.end); InternalAggregations aggs = search(); if (aggs == null) { LOGGER.trace(() -> "[" + context.jobId + "] extraction finished"); @@ -125,13 +128,25 @@ private InternalAggregations search() { // Also, it doesn't make sense to have a derivative when grouping by time AND by some other criteria. 
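For readers new to composite aggregations: they page through buckets, and the after-key captured from one response is fed into the next request, which is what the aggregateAfter call below does. A minimal sketch with illustrative names and page size:

    // One page of a composite aggregation over a date_histogram value source.
    CompositeAggregationBuilder composite = new CompositeAggregationBuilder(
        "buckets",
        List.of(new DateHistogramValuesSourceBuilder("time_bucket").field("timestamp").fixedInterval(DateHistogramInterval.MINUTE))
    ).size(1000);
    if (afterKey != null) {
        composite.aggregateAfter(afterKey); // Map<String, Object> taken from the previous page
    }
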
LOGGER.trace( - () -> format("[%s] Executing composite aggregated search from [%s] to [%s]", context.jobId, context.start, context.end) + () -> format( + "[%s] Executing composite aggregated search from [%s] to [%s]", + context.jobId, + context.queryContext.start, + context.queryContext.end + ) ); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(0) - .query(DataExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, context.start, context.end)); + .query( + DataExtractorUtils.wrapInTimeRangeQuery( + context.queryContext.query, + context.queryContext.timeField, + context.queryContext.start, + context.queryContext.end + ) + ); - if (context.runtimeMappings.isEmpty() == false) { - searchSourceBuilder.runtimeMappings(context.runtimeMappings); + if (context.queryContext.runtimeMappings.isEmpty() == false) { + searchSourceBuilder.runtimeMappings(context.queryContext.runtimeMappings); } if (afterKey != null) { compositeAggregationBuilder.aggregateAfter(afterKey); @@ -156,16 +171,16 @@ private InternalAggregations search() { } } - protected SearchResponse executeSearchRequest(ActionRequestBuilder searchRequestBuilder) { + private SearchResponse executeSearchRequest(ActionRequestBuilder searchRequestBuilder) { SearchResponse searchResponse = ClientHelper.executeWithHeaders( - context.headers, + context.queryContext.headers, ClientHelper.ML_ORIGIN, client, searchRequestBuilder::get ); boolean success = false; try { - checkForSkippedClusters(searchResponse); + DataExtractorUtils.checkForSkippedClusters(searchResponse); success = true; } finally { if (success == false) { @@ -177,10 +192,10 @@ protected SearchResponse executeSearchRequest(ActionRequestBuilder searchRequestBuilder = DataExtractorUtils.getSearchRequestBuilderForSummary( + client, + context.queryContext + ); + SearchResponse searchResponse = executeSearchRequest(searchRequestBuilder); + try { + LOGGER.debug("[{}] Aggregating Data summary response was obtained", context.jobId); + timingStatsReporter.reportSearchDuration(searchResponse.getTook()); + return DataExtractorUtils.getDataSummary(searchResponse); + } finally { + searchResponse.decRef(); + } + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorContext.java index 5fd5b58c5556d..75531e68d9738 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorContext.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorQueryContext; import java.util.List; import java.util.Map; @@ -18,18 +19,11 @@ class CompositeAggregationDataExtractorContext { final String jobId; - final String timeField; final Set fields; - final String[] indices; - final QueryBuilder query; final CompositeAggregationBuilder compositeAggregationBuilder; - final long start; - final long end; final boolean includeDocCount; - final Map headers; - final IndicesOptions indicesOptions; - final Map runtimeMappings; final String 
compositeAggDateHistogramGroupSourceName; + final DataExtractorQueryContext queryContext; CompositeAggregationDataExtractorContext( String jobId, @@ -47,17 +41,19 @@ class CompositeAggregationDataExtractorContext { Map runtimeMappings ) { this.jobId = Objects.requireNonNull(jobId); - this.timeField = Objects.requireNonNull(timeField); this.fields = Objects.requireNonNull(fields); - this.indices = indices.toArray(new String[0]); - this.query = Objects.requireNonNull(query); this.compositeAggregationBuilder = Objects.requireNonNull(compositeAggregationBuilder); this.compositeAggDateHistogramGroupSourceName = Objects.requireNonNull(compositeAggDateHistogramGroupSourceName); - this.start = start; - this.end = end; this.includeDocCount = includeDocCount; - this.headers = headers; - this.indicesOptions = Objects.requireNonNull(indicesOptions); - this.runtimeMappings = Objects.requireNonNull(runtimeMappings); + this.queryContext = new DataExtractorQueryContext( + indices, + query, + Objects.requireNonNull(timeField), + start, + end, + headers, + indicesOptions, + runtimeMappings + ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractor.java index 1503e93e5c11b..89a137807959d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/RollupDataExtractor.java @@ -18,7 +18,7 @@ * stored and they are then processed in batches. Cancellation is supported between batches. * Note that this class is NOT thread-safe. */ -class RollupDataExtractor extends AbstractAggregationDataExtractor { +class RollupDataExtractor extends AbstractAggregationDataExtractor { RollupDataExtractor( Client client, @@ -30,8 +30,8 @@ class RollupDataExtractor extends AbstractAggregationDataExtractor The chunk span can be either specified or not. When not specified, - * a heuristic is employed (see {@link DataSummary#estimateChunk()}) to automatically determine the chunk span. - * The search is set up (see {@link #setUpChunkedSearch()} by querying a data summary for the given time range + * a heuristic is employed (see {@link #setUpChunkedSearch()}) to automatically determine the chunk span. + * The search is set up by querying a data summary for the given time range * that includes the number of total hits and the earliest/latest times. Those are then used to determine the chunk span, * when necessary, and to jump the search forward to the time where the earliest data can be found. * If a search for a chunk returns empty, the set up is performed again for the remaining time. 
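In outline, the behaviour this javadoc describes is a loop over chunk-sized sub-extractors. A control-flow sketch only, with hypothetical helpers (alignToFloor, process, factory), not the actual implementation:

    long current = alignToFloor(summary.earliestTime()); // jump forward to where data begins
    while (current < end) {
        long chunkEnd = Math.min(current + chunkSpan, end);
        DataExtractor chunk = factory.newExtractor(current, chunkEnd);
        while (chunk.hasNext()) {
            process(chunk.next()); // stream one batch from this chunk
        }
        current = chunkEnd; // if the chunk was empty, the setup is redone for the remaining range
    }
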
@@ -50,49 +35,30 @@ */ public class ChunkedDataExtractor implements DataExtractor { - interface DataSummary { - long estimateChunk(); - - boolean hasData(); - - long earliestTime(); - - long getDataTimeSpread(); - } - private static final Logger LOGGER = LogManager.getLogger(ChunkedDataExtractor.class); - private static final String EARLIEST_TIME = "earliest_time"; - private static final String LATEST_TIME = "latest_time"; - /** Let us set a minimum chunk span of 1 minute */ private static final long MIN_CHUNK_SPAN = 60000L; - private final Client client; private final DataExtractorFactory dataExtractorFactory; private final ChunkedDataExtractorContext context; - private final DataSummaryFactory dataSummaryFactory; - private final DatafeedTimingStatsReporter timingStatsReporter; private long currentStart; private long currentEnd; private long chunkSpan; private boolean isCancelled; private DataExtractor currentExtractor; - public ChunkedDataExtractor( - Client client, - DataExtractorFactory dataExtractorFactory, - ChunkedDataExtractorContext context, - DatafeedTimingStatsReporter timingStatsReporter - ) { - this.client = Objects.requireNonNull(client); + public ChunkedDataExtractor(DataExtractorFactory dataExtractorFactory, ChunkedDataExtractorContext context) { this.dataExtractorFactory = Objects.requireNonNull(dataExtractorFactory); this.context = Objects.requireNonNull(context); - this.timingStatsReporter = Objects.requireNonNull(timingStatsReporter); - this.currentStart = context.start; - this.currentEnd = context.start; + this.currentStart = context.start(); + this.currentEnd = context.start(); this.isCancelled = false; - this.dataSummaryFactory = new DataSummaryFactory(); + } + + @Override + public DataSummary getSummary() { + return null; } @Override @@ -101,7 +67,7 @@ public boolean hasNext() { if (isCancelled()) { return currentHasNext; } - return currentHasNext || currentEnd < context.end; + return currentHasNext || currentEnd < context.end(); } @Override @@ -119,47 +85,42 @@ public Result next() throws IOException { } private void setUpChunkedSearch() { - DataSummary dataSummary = dataSummaryFactory.buildDataSummary(); + DataSummary dataSummary = dataExtractorFactory.newExtractor(currentStart, context.end()).getSummary(); if (dataSummary.hasData()) { - currentStart = context.timeAligner.alignToFloor(dataSummary.earliestTime()); + currentStart = context.timeAligner().alignToFloor(dataSummary.earliestTime()); currentEnd = currentStart; - chunkSpan = context.chunkSpan == null ? 
dataSummary.estimateChunk() : context.chunkSpan.getMillis(); - chunkSpan = context.timeAligner.alignToCeil(chunkSpan); - LOGGER.debug( - "[{}] Chunked search configured: kind = {}, dataTimeSpread = {} ms, chunk span = {} ms", - context.jobId, - dataSummary.getClass().getSimpleName(), - dataSummary.getDataTimeSpread(), - chunkSpan - ); - } else { - // search is over - currentEnd = context.end; - LOGGER.debug("[{}] Chunked search configured: no data found", context.jobId); - } - } - protected SearchResponse executeSearchRequest(ActionRequestBuilder searchRequestBuilder) { - SearchResponse searchResponse = ClientHelper.executeWithHeaders( - context.headers, - ClientHelper.ML_ORIGIN, - client, - searchRequestBuilder::get - ); - boolean success = false; - try { - checkForSkippedClusters(searchResponse); - success = true; - } finally { - if (success == false) { - searchResponse.decRef(); + if (context.chunkSpan() != null) { + chunkSpan = context.chunkSpan().getMillis(); + } else if (context.hasAggregations()) { + // This heuristic is a direct copy of the manual chunking config auto-creation done in {@link DatafeedConfig} + chunkSpan = DatafeedConfig.DEFAULT_AGGREGATION_CHUNKING_BUCKETS * context.histogramInterval(); + } else { + long timeSpread = dataSummary.latestTime() - dataSummary.earliestTime(); + if (timeSpread <= 0) { + chunkSpan = context.end() - currentEnd; + } else { + // The heuristic here is that we want a time interval where we expect roughly scrollSize documents + // (assuming data are uniformly spread over time). + // We have totalHits documents over dataTimeSpread (latestTime - earliestTime), we want scrollSize documents over chunk. + // Thus, the interval would be (scrollSize * dataTimeSpread) / totalHits. + // However, assuming this as the chunk span may often lead to half-filled pages or empty searches. + // It is beneficial to take a multiple of that. Based on benchmarking, we set this to 10x. + chunkSpan = Math.max(MIN_CHUNK_SPAN, 10 * (context.scrollSize() * timeSpread) / dataSummary.totalHits()); + } } + + chunkSpan = context.timeAligner().alignToCeil(chunkSpan); + LOGGER.debug("[{}] Chunked search configured: chunk span = {} ms", context.jobId(), chunkSpan); + } else { + // search is over + currentEnd = context.end(); + LOGGER.debug("[{}] Chunked search configured: no data found", context.jobId()); } - return searchResponse; } private Result getNextStream() throws IOException { - SearchInterval lastSearchInterval = new SearchInterval(context.start, context.end); + SearchInterval lastSearchInterval = new SearchInterval(context.start(), context.end()); while (hasNext()) { boolean isNewSearch = false; @@ -202,9 +163,9 @@ private Result getNextStream() throws IOException { private void advanceTime() { currentStart = currentEnd; - currentEnd = Math.min(currentStart + chunkSpan, context.end); + currentEnd = Math.min(currentStart + chunkSpan, context.end()); currentExtractor = dataExtractorFactory.newExtractor(currentStart, currentEnd); - LOGGER.trace("[{}] advances time to [{}, {})", context.jobId, currentStart, currentEnd); + LOGGER.debug("[{}] advances time to [{}, {})", context.jobId(), currentStart, currentEnd); } @Override @@ -230,186 +191,10 @@ public void destroy() { @Override public long getEndTime() { - return context.end; + return context.end(); } ChunkedDataExtractorContext getContext() { return context; } - - private class DataSummaryFactory { - - /** - * If there are aggregations, an AggregatedDataSummary object is created. 
It returns a ScrollingDataSummary otherwise. - * - * By default a DatafeedConfig with aggregations, should already have a manual ChunkingConfig created. - * However, the end user could have specifically set the ChunkingConfig to AUTO, which would not really work for aggregations. - * So, if we need to gather an appropriate chunked time for aggregations, we can utilize the AggregatedDataSummary - * - * @return DataSummary object - */ - private DataSummary buildDataSummary() { - return context.hasAggregations ? newAggregatedDataSummary() : newScrolledDataSummary(); - } - - private DataSummary newScrolledDataSummary() { - SearchRequestBuilder searchRequestBuilder = rangeSearchRequest(); - - SearchResponse searchResponse = executeSearchRequest(searchRequestBuilder); - try { - LOGGER.debug("[{}] Scrolling Data summary response was obtained", context.jobId); - timingStatsReporter.reportSearchDuration(searchResponse.getTook()); - - long earliestTime = 0; - long latestTime = 0; - long totalHits = searchResponse.getHits().getTotalHits().value; - if (totalHits > 0) { - InternalAggregations aggregations = searchResponse.getAggregations(); - Min min = aggregations.get(EARLIEST_TIME); - earliestTime = (long) min.value(); - Max max = aggregations.get(LATEST_TIME); - latestTime = (long) max.value(); - } - return new ScrolledDataSummary(earliestTime, latestTime, totalHits); - } finally { - searchResponse.decRef(); - } - } - - private DataSummary newAggregatedDataSummary() { - // TODO: once RollupSearchAction is changed from indices:admin* to indices:data/read/* this branch is not needed - ActionRequestBuilder searchRequestBuilder = - dataExtractorFactory instanceof RollupDataExtractorFactory ? rollupRangeSearchRequest() : rangeSearchRequest(); - SearchResponse searchResponse = executeSearchRequest(searchRequestBuilder); - try { - LOGGER.debug("[{}] Aggregating Data summary response was obtained", context.jobId); - timingStatsReporter.reportSearchDuration(searchResponse.getTook()); - - InternalAggregations aggregations = searchResponse.getAggregations(); - // This can happen if all the indices the datafeed is searching are deleted after it started. - // Note that unlike the scrolled data summary method above we cannot check for this situation - // by checking for zero hits, because aggregations that work on rollups return zero hits even - // when they retrieve data. 
- if (aggregations == null) { - return AggregatedDataSummary.noDataSummary(context.histogramInterval); - } - Min min = aggregations.get(EARLIEST_TIME); - Max max = aggregations.get(LATEST_TIME); - return new AggregatedDataSummary(min.value(), max.value(), context.histogramInterval); - } finally { - searchResponse.decRef(); - } - } - - private SearchSourceBuilder rangeSearchBuilder() { - return new SearchSourceBuilder().size(0) - .query(DataExtractorUtils.wrapInTimeRangeQuery(context.query, context.timeField, currentStart, context.end)) - .runtimeMappings(context.runtimeMappings) - .aggregation(AggregationBuilders.min(EARLIEST_TIME).field(context.timeField)) - .aggregation(AggregationBuilders.max(LATEST_TIME).field(context.timeField)); - } - - private SearchRequestBuilder rangeSearchRequest() { - return new SearchRequestBuilder(client).setIndices(context.indices) - .setIndicesOptions(context.indicesOptions) - .setSource(rangeSearchBuilder()) - .setAllowPartialSearchResults(false) - .setTrackTotalHits(true); - } - - private RollupSearchAction.RequestBuilder rollupRangeSearchRequest() { - SearchRequest searchRequest = new SearchRequest().indices(context.indices) - .indicesOptions(context.indicesOptions) - .allowPartialSearchResults(false) - .source(rangeSearchBuilder()); - return new RollupSearchAction.RequestBuilder(client, searchRequest); - } - } - - private class ScrolledDataSummary implements DataSummary { - - private final long earliestTime; - private final long latestTime; - private final long totalHits; - - private ScrolledDataSummary(long earliestTime, long latestTime, long totalHits) { - this.earliestTime = earliestTime; - this.latestTime = latestTime; - this.totalHits = totalHits; - } - - @Override - public long earliestTime() { - return earliestTime; - } - - @Override - public long getDataTimeSpread() { - return latestTime - earliestTime; - } - - /** - * The heuristic here is that we want a time interval where we expect roughly scrollSize documents - * (assuming data are uniformly spread over time). - * We have totalHits documents over dataTimeSpread (latestTime - earliestTime), we want scrollSize documents over chunk. - * Thus, the interval would be (scrollSize * dataTimeSpread) / totalHits. - * However, assuming this as the chunk span may often lead to half-filled pages or empty searches. - * It is beneficial to take a multiple of that. Based on benchmarking, we set this to 10x. 
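As a worked example of that heuristic: with scrollSize = 1000, a data time spread of one hour (3,600,000 ms), and 1,200,000 total hits, the estimate is 10 * 1000 * 3,600,000 / 1,200,000 = 30,000 ms, which the one-minute MIN_CHUNK_SPAN floor then raises to 60,000 ms.
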
- */ - @Override - public long estimateChunk() { - long dataTimeSpread = getDataTimeSpread(); - if (totalHits <= 0 || dataTimeSpread <= 0) { - return context.end - currentEnd; - } - long estimatedChunk = 10 * (context.scrollSize * getDataTimeSpread()) / totalHits; - return Math.max(estimatedChunk, MIN_CHUNK_SPAN); - } - - @Override - public boolean hasData() { - return totalHits > 0; - } - } - - static class AggregatedDataSummary implements DataSummary { - - private final double earliestTime; - private final double latestTime; - private final long histogramIntervalMillis; - - static AggregatedDataSummary noDataSummary(long histogramInterval) { - // hasData() uses infinity to mean no data - return new AggregatedDataSummary(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, histogramInterval); - } - - AggregatedDataSummary(double earliestTime, double latestTime, long histogramInterval) { - this.earliestTime = earliestTime; - this.latestTime = latestTime; - this.histogramIntervalMillis = histogramInterval; - } - - /** - * This heuristic is a direct copy of the manual chunking config auto-creation done in {@link DatafeedConfig} - */ - @Override - public long estimateChunk() { - return DatafeedConfig.DEFAULT_AGGREGATION_CHUNKING_BUCKETS * histogramIntervalMillis; - } - - @Override - public boolean hasData() { - return (Double.isInfinite(earliestTime) || Double.isInfinite(latestTime)) == false; - } - - @Override - public long earliestTime() { - return (long) earliestTime; - } - - @Override - public long getDataTimeSpread() { - return (long) latestTime - (long) earliestTime; - } - } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java index 2989ddb40d370..465c97c38372b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorContext.java @@ -6,67 +6,21 @@ */ package org.elasticsearch.xpack.ml.datafeed.extractor.chunked; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.query.QueryBuilder; - -import java.util.List; -import java.util.Map; -import java.util.Objects; - -class ChunkedDataExtractorContext { +record ChunkedDataExtractorContext( + String jobId, + int scrollSize, + long start, + long end, + TimeValue chunkSpan, + TimeAligner timeAligner, + boolean hasAggregations, + Long histogramInterval +) { interface TimeAligner { long alignToFloor(long value); long alignToCeil(long value); } - - final String jobId; - final String timeField; - final String[] indices; - final QueryBuilder query; - final int scrollSize; - final long start; - final long end; - final TimeValue chunkSpan; - final TimeAligner timeAligner; - final Map headers; - final boolean hasAggregations; - final Long histogramInterval; - final IndicesOptions indicesOptions; - final Map runtimeMappings; - - ChunkedDataExtractorContext( - String jobId, - String timeField, - List indices, - QueryBuilder query, - int scrollSize, - long start, - long end, - @Nullable TimeValue chunkSpan, - TimeAligner timeAligner, - Map headers, - boolean hasAggregations, - @Nullable Long histogramInterval, - IndicesOptions indicesOptions, - Map runtimeMappings 
- ) { - this.jobId = Objects.requireNonNull(jobId); - this.timeField = Objects.requireNonNull(timeField); - this.indices = indices.toArray(new String[indices.size()]); - this.query = Objects.requireNonNull(query); - this.scrollSize = scrollSize; - this.start = start; - this.end = end; - this.chunkSpan = chunkSpan; - this.timeAligner = Objects.requireNonNull(timeAligner); - this.headers = headers; - this.hasAggregations = hasAggregations; - this.histogramInterval = histogramInterval; - this.indicesOptions = Objects.requireNonNull(indicesOptions); - this.runtimeMappings = Objects.requireNonNull(runtimeMappings); - } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java index b4141ec632d3b..09414ba58aacb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactory.java @@ -6,14 +6,10 @@ */ package org.elasticsearch.xpack.ml.datafeed.extractor.chunked; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.Intervals; -import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; @@ -21,57 +17,37 @@ public class ChunkedDataExtractorFactory implements DataExtractorFactory { - private final Client client; private final DatafeedConfig datafeedConfig; - - private final QueryBuilder extraFilters; private final Job job; private final DataExtractorFactory dataExtractorFactory; private final NamedXContentRegistry xContentRegistry; - private final DatafeedTimingStatsReporter timingStatsReporter; public ChunkedDataExtractorFactory( - Client client, DatafeedConfig datafeedConfig, - QueryBuilder extraFilters, Job job, NamedXContentRegistry xContentRegistry, - DataExtractorFactory dataExtractorFactory, - DatafeedTimingStatsReporter timingStatsReporter + DataExtractorFactory dataExtractorFactory ) { - this.client = Objects.requireNonNull(client); this.datafeedConfig = Objects.requireNonNull(datafeedConfig); - this.extraFilters = extraFilters; this.job = Objects.requireNonNull(job); this.dataExtractorFactory = Objects.requireNonNull(dataExtractorFactory); this.xContentRegistry = xContentRegistry; - this.timingStatsReporter = Objects.requireNonNull(timingStatsReporter); } @Override public DataExtractor newExtractor(long start, long end) { - QueryBuilder queryBuilder = datafeedConfig.getParsedQuery(xContentRegistry); - if (extraFilters != null) { - queryBuilder = QueryBuilders.boolQuery().filter(queryBuilder).filter(extraFilters); - } ChunkedDataExtractorContext.TimeAligner timeAligner = newTimeAligner(); ChunkedDataExtractorContext dataExtractorContext = new ChunkedDataExtractorContext( job.getId(), - job.getDataDescription().getTimeField(), - datafeedConfig.getIndices(), - queryBuilder, datafeedConfig.getScrollSize(), timeAligner.alignToCeil(start), 
timeAligner.alignToFloor(end), datafeedConfig.getChunkingConfig().getTimeSpan(), timeAligner, - datafeedConfig.getHeaders(), datafeedConfig.hasAggregations(), - datafeedConfig.hasAggregations() ? datafeedConfig.getHistogramIntervalMillis(xContentRegistry) : null, - datafeedConfig.getIndicesOptions(), - datafeedConfig.getRuntimeMappings() + datafeedConfig.hasAggregations() ? datafeedConfig.getHistogramIntervalMillis(xContentRegistry) : null ); - return new ChunkedDataExtractor(client, dataExtractorFactory, dataExtractorContext, timingStatsReporter); + return new ChunkedDataExtractor(dataExtractorFactory, dataExtractorContext); } private ChunkedDataExtractorContext.TimeAligner newTimeAligner() { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java index 0caa59fae914b..5da89da6b3450 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java @@ -91,7 +91,7 @@ public void destroy() { @Override public long getEndTime() { - return context.end; + return context.queryContext.end; } @Override @@ -103,12 +103,12 @@ public Result next() throws IOException { if (stream.isPresent() == false) { hasNext = false; } - return new Result(new SearchInterval(context.start, context.end), stream); + return new Result(new SearchInterval(context.queryContext.start, context.queryContext.end), stream); } private Optional tryNextStream() throws IOException { try { - return scrollId == null ? Optional.ofNullable(initScroll(context.start)) : Optional.ofNullable(continueScroll()); + return scrollId == null ? Optional.ofNullable(initScroll(context.queryContext.start)) : Optional.ofNullable(continueScroll()); } catch (Exception e) { scrollId = null; if (searchHasShardFailure) { @@ -116,7 +116,7 @@ private Optional tryNextStream() throws IOException { } logger.debug("[{}] Resetting scroll search after shard failure", context.jobId); markScrollAsErrored(); - return Optional.ofNullable(initScroll(lastTimestamp == null ? context.start : lastTimestamp)); + return Optional.ofNullable(initScroll(lastTimestamp == null ? 
context.queryContext.start : lastTimestamp)); } } @@ -135,14 +135,14 @@ protected InputStream initScroll(long startTimestamp) throws IOException { protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequestBuilder) { SearchResponse searchResponse = ClientHelper.executeWithHeaders( - context.headers, + context.queryContext.headers, ClientHelper.ML_ORIGIN, client, searchRequestBuilder::get ); boolean success = false; try { - checkForSkippedClusters(searchResponse); + DataExtractorUtils.checkForSkippedClusters(searchResponse); success = true; } catch (ResourceNotFoundException e) { clearScrollLoggingExceptions(searchResponse.getScrollId()); @@ -158,12 +158,19 @@ protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequest private SearchRequestBuilder buildSearchRequest(long start) { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(context.scrollSize) .sort(context.extractedFields.timeField(), SortOrder.ASC) - .query(DataExtractorUtils.wrapInTimeRangeQuery(context.query, context.extractedFields.timeField(), start, context.end)) - .runtimeMappings(context.runtimeMappings); + .query( + DataExtractorUtils.wrapInTimeRangeQuery( + context.queryContext.query, + context.extractedFields.timeField(), + start, + context.queryContext.end + ) + ) + .runtimeMappings(context.queryContext.runtimeMappings); SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client).setScroll(SCROLL_TIMEOUT) - .setIndices(context.indices) - .setIndicesOptions(context.indicesOptions) + .setIndices(context.queryContext.indices) + .setIndicesOptions(context.queryContext.indicesOptions) .setAllowPartialSearchResults(false) .setSource(searchSourceBuilder); @@ -228,7 +235,9 @@ private InputStream continueScroll() throws IOException { } logger.debug("[{}] search failed due to SearchPhaseExecutionException. Will attempt again with new scroll", context.jobId); markScrollAsErrored(); - searchResponse = executeSearchRequest(buildSearchRequest(lastTimestamp == null ? context.start : lastTimestamp)); + searchResponse = executeSearchRequest( + buildSearchRequest(lastTimestamp == null ? 
context.queryContext.start : lastTimestamp) + ); } logger.debug("[{}] Search response was obtained", context.jobId); timingStatsReporter.reportSearchDuration(searchResponse.getTook()); @@ -254,14 +263,14 @@ void markScrollAsErrored() { @SuppressWarnings("HiddenField") protected SearchResponse executeSearchScrollRequest(String scrollId) { SearchResponse searchResponse = ClientHelper.executeWithHeaders( - context.headers, + context.queryContext.headers, ClientHelper.ML_ORIGIN, client, () -> new SearchScrollRequestBuilder(client).setScroll(SCROLL_TIMEOUT).setScrollId(scrollId).get() ); boolean success = false; try { - checkForSkippedClusters(searchResponse); + DataExtractorUtils.checkForSkippedClusters(searchResponse); success = true; } catch (ResourceNotFoundException e) { clearScrollLoggingExceptions(searchResponse.getScrollId()); @@ -294,11 +303,24 @@ private void innerClearScroll(String scrollId) { ClearScrollRequest request = new ClearScrollRequest(); request.addScrollId(scrollId); ClientHelper.executeWithHeaders( - context.headers, + context.queryContext.headers, ClientHelper.ML_ORIGIN, client, () -> client.execute(TransportClearScrollAction.TYPE, request).actionGet() ); } } + + @Override + public DataSummary getSummary() { + SearchRequestBuilder searchRequestBuilder = DataExtractorUtils.getSearchRequestBuilderForSummary(client, context.queryContext); + SearchResponse searchResponse = executeSearchRequest(searchRequestBuilder); + try { + logger.debug("[{}] Scrolling Data summary response was obtained", context.jobId); + timingStatsReporter.reportSearchDuration(searchResponse.getTook()); + return DataExtractorUtils.getDataSummary(searchResponse); + } finally { + searchResponse.decRef(); + } + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorContext.java index 58c0c5b485742..776c7c252ffdf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorContext.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorQueryContext; import java.util.List; import java.util.Map; @@ -18,15 +19,9 @@ class ScrollDataExtractorContext { final String jobId; final TimeBasedExtractedFields extractedFields; - final String[] indices; - final QueryBuilder query; final List scriptFields; final int scrollSize; - final long start; - final long end; - final Map headers; - final IndicesOptions indicesOptions; - final Map runtimeMappings; + final DataExtractorQueryContext queryContext; ScrollDataExtractorContext( String jobId, @@ -41,16 +36,19 @@ class ScrollDataExtractorContext { IndicesOptions indicesOptions, Map runtimeMappings ) { - this.jobId = Objects.requireNonNull(jobId); + this.jobId = jobId; this.extractedFields = Objects.requireNonNull(extractedFields); - this.indices = indices.toArray(new String[indices.size()]); - this.query = Objects.requireNonNull(query); this.scriptFields = Objects.requireNonNull(scriptFields); this.scrollSize = scrollSize; - this.start = start; - this.end = end; - this.headers = headers; - this.indicesOptions = 
Objects.requireNonNull(indicesOptions); - this.runtimeMappings = Objects.requireNonNull(runtimeMappings); + this.queryContext = new DataExtractorQueryContext( + indices, + query, + extractedFields.timeField(), + start, + end, + headers, + indicesOptions, + runtimeMappings + ); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactoryTests.java index 9a76eb5f2b936..2a086457ba755 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorFactoryTests.java @@ -50,8 +50,8 @@ public void testNewExtractor_GivenAlignedTimes() { AggregationDataExtractor dataExtractor = (AggregationDataExtractor) factory.newExtractor(2000, 5000); - assertThat(dataExtractor.getContext().start, equalTo(2000L)); - assertThat(dataExtractor.getContext().end, equalTo(5000L)); + assertThat(dataExtractor.getContext().queryContext.start, equalTo(2000L)); + assertThat(dataExtractor.getContext().queryContext.end, equalTo(5000L)); } public void testNewExtractor_GivenNonAlignedTimes() { @@ -59,8 +59,8 @@ public void testNewExtractor_GivenNonAlignedTimes() { AggregationDataExtractor dataExtractor = (AggregationDataExtractor) factory.newExtractor(3980, 9200); - assertThat(dataExtractor.getContext().start, equalTo(4000L)); - assertThat(dataExtractor.getContext().end, equalTo(9000L)); + assertThat(dataExtractor.getContext().queryContext.start, equalTo(4000L)); + assertThat(dataExtractor.getContext().queryContext.end, equalTo(9000L)); } private AggregationDataExtractorFactory createFactory(long histogramInterval) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java index 5b2cd8f78d02e..02e33695ff7e2 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java @@ -6,28 +6,35 @@ */ package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation; +import org.apache.lucene.search.TotalHits; +import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; 
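The test changes below drop the TestDataExtractor subclass ("monkey patching") in favour of stubbing the mocked Client and capturing the request it receives. The core Mockito pattern, sketched with illustrative variable names; searchFuture is an assumed ActionFuture holding the canned response:

    ArgumentCaptor<SearchRequest> captor = ArgumentCaptor.forClass(SearchRequest.class);
    when(client.execute(eq(TransportSearchAction.TYPE), captor.capture())).thenReturn(searchFuture);
    // ... run the extractor under test ...
    SearchRequest sent = captor.getValue(); // the request the extractor actually built
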
import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.Min; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.datafeed.SearchInterval; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; -import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter.DatafeedTimingStatsPersister; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.Term; import org.junit.Before; +import org.mockito.ArgumentCaptor; import java.io.BufferedReader; import java.io.IOException; @@ -50,14 +57,17 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.stringContainsInOrder; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class AggregationDataExtractorTests extends ESTestCase { - private Client testClient; - private List capturedSearchRequests; + private Client client; private String jobId; private String timeField; private Set fields; @@ -67,37 +77,12 @@ public class AggregationDataExtractorTests extends ESTestCase { private DatafeedTimingStatsReporter timingStatsReporter; private Map runtimeMappings; - private class TestDataExtractor extends AggregationDataExtractor { - - private SearchResponse nextResponse; - private SearchPhaseExecutionException ex; - - TestDataExtractor(long start, long end) { - super(testClient, createContext(start, end), timingStatsReporter); - } - - @Override - protected SearchResponse executeSearchRequest(SearchRequestBuilder searchRequestBuilder) { - capturedSearchRequests.add(searchRequestBuilder); - if (ex != null) { - throw ex; - } - return nextResponse; - } - - void setNextResponse(SearchResponse searchResponse) { - nextResponse = searchResponse; - } - - void setNextResponseToError(SearchPhaseExecutionException ex) { - this.ex = ex; - } - } - @Before public void setUpTests() { - testClient = mock(Client.class); - capturedSearchRequests = new ArrayList<>(); + client = mock(Client.class); + when(client.threadPool()).thenReturn(mock(ThreadPool.class)); + when(client.threadPool().getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + jobId = "test-job"; timeField = "time"; fields = new HashSet<>(); @@ -115,7 +100,7 @@ public void setUpTests() { ) ); runtimeMappings = Collections.emptyMap(); - timingStatsReporter = new DatafeedTimingStatsReporter(new DatafeedTimingStats(jobId), mock(DatafeedTimingStatsPersister.class)); + timingStatsReporter = mock(DatafeedTimingStatsReporter.class); } public void testExtraction() throws IOException { @@ -139,10 +124,11 @@ public void testExtraction() throws IOException { ) ); - TestDataExtractor extractor = new TestDataExtractor(1000L, 4000L); + AggregationDataExtractor extractor = new AggregationDataExtractor(client, createContext(1000L, 4000L), timingStatsReporter); - SearchResponse response = createSearchResponse("time", 
histogramBuckets); - extractor.setNextResponse(response); + ArgumentCaptor searchRequestCaptor = ArgumentCaptor.forClass(SearchRequest.class); + ActionFuture searchResponse = toActionFuture(createSearchResponse("time", histogramBuckets)); + when(client.execute(eq(TransportSearchAction.TYPE), searchRequestCaptor.capture())).thenReturn(searchResponse); assertThat(extractor.hasNext(), is(true)); DataExtractor.Result result = extractor.next(); @@ -156,9 +142,8 @@ public void testExtraction() throws IOException { {"time":3999,"airline":"b","responsetime":32.0,"doc_count":3}"""; assertThat(asString(stream.get()), equalTo(expectedStream)); assertThat(extractor.hasNext(), is(false)); - assertThat(capturedSearchRequests.size(), equalTo(1)); - String searchRequest = capturedSearchRequests.get(0).toString().replaceAll("\\s", ""); + String searchRequest = searchRequestCaptor.getValue().toString().replaceAll("\\s", ""); assertThat(searchRequest, containsString("\"size\":0")); assertThat( searchRequest, @@ -175,45 +160,47 @@ public void testExtraction() throws IOException { } public void testExtractionGivenResponseHasNullAggs() throws IOException { - TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L); + AggregationDataExtractor extractor = new AggregationDataExtractor(client, createContext(1000L, 2000L), timingStatsReporter); - SearchResponse response = createSearchResponse(null); - extractor.setNextResponse(response); + ActionFuture searchResponse = toActionFuture(createSearchResponse(null)); + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenReturn(searchResponse); assertThat(extractor.hasNext(), is(true)); assertThat(extractor.next().data().isPresent(), is(false)); assertThat(extractor.hasNext(), is(false)); - assertThat(capturedSearchRequests.size(), equalTo(1)); + verify(client).execute(eq(TransportSearchAction.TYPE), any()); } public void testExtractionGivenResponseHasEmptyAggs() throws IOException { - TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L); + AggregationDataExtractor extractor = new AggregationDataExtractor(client, createContext(1000L, 2000L), timingStatsReporter); + InternalAggregations emptyAggs = AggregationTestUtils.createAggs(Collections.emptyList()); - SearchResponse response = createSearchResponse(emptyAggs); - extractor.setNextResponse(response); + ActionFuture searchResponse = toActionFuture(createSearchResponse(emptyAggs)); + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenReturn(searchResponse); assertThat(extractor.hasNext(), is(true)); assertThat(extractor.next().data().isPresent(), is(false)); assertThat(extractor.hasNext(), is(false)); - assertThat(capturedSearchRequests.size(), equalTo(1)); + verify(client).execute(eq(TransportSearchAction.TYPE), any()); } public void testExtractionGivenResponseHasEmptyHistogramAgg() throws IOException { - TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L); - SearchResponse response = createSearchResponse("time", Collections.emptyList()); - extractor.setNextResponse(response); + AggregationDataExtractor extractor = new AggregationDataExtractor(client, createContext(1000L, 2000L), timingStatsReporter); + + ActionFuture searchResponse = toActionFuture(createSearchResponse("time", Collections.emptyList())); + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenReturn(searchResponse); assertThat(extractor.hasNext(), is(true)); assertThat(extractor.next().data().isPresent(), is(false)); assertThat(extractor.hasNext(), is(false)); - 
assertThat(capturedSearchRequests.size(), equalTo(1)); + verify(client).execute(eq(TransportSearchAction.TYPE), any()); } public void testExtractionGivenResponseHasMultipleTopLevelAggs() { - TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L); + AggregationDataExtractor extractor = new AggregationDataExtractor(client, createContext(1000L, 2000L), timingStatsReporter); InternalHistogram histogram1 = mock(InternalHistogram.class); when(histogram1.getName()).thenReturn("hist_1"); @@ -221,8 +208,8 @@ public void testExtractionGivenResponseHasMultipleTopLevelAggs() { when(histogram2.getName()).thenReturn("hist_2"); InternalAggregations aggs = AggregationTestUtils.createAggs(Arrays.asList(histogram1, histogram2)); - SearchResponse response = createSearchResponse(aggs); - extractor.setNextResponse(response); + ActionFuture searchResponse = toActionFuture(createSearchResponse(aggs)); + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenReturn(searchResponse); assertThat(extractor.hasNext(), is(true)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, extractor::next); @@ -230,9 +217,10 @@ public void testExtractionGivenResponseHasMultipleTopLevelAggs() { } public void testExtractionGivenCancelBeforeNext() { - TestDataExtractor extractor = new TestDataExtractor(1000L, 4000L); - SearchResponse response = createSearchResponse("time", Collections.emptyList()); - extractor.setNextResponse(response); + AggregationDataExtractor extractor = new AggregationDataExtractor(client, createContext(1000L, 4000L), timingStatsReporter); + + ActionFuture searchResponse = toActionFuture(createSearchResponse("time", Collections.emptyList())); + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenReturn(searchResponse); extractor.cancel(); assertThat(extractor.hasNext(), is(false)); @@ -256,10 +244,10 @@ public void testExtractionGivenCancelHalfWay() throws IOException { timestamp += 1000L; } - TestDataExtractor extractor = new TestDataExtractor(1000L, timestamp + 1); + AggregationDataExtractor extractor = new AggregationDataExtractor(client, createContext(1000L, timestamp + 1), timingStatsReporter); - SearchResponse response = createSearchResponse("time", histogramBuckets); - extractor.setNextResponse(response); + ActionFuture searchResponse = toActionFuture(createSearchResponse("time", histogramBuckets)); + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenReturn(searchResponse); assertThat(extractor.hasNext(), is(true)); assertThat(countMatches('{', asString(extractor.next().data().get())), equalTo(2400L)); @@ -277,23 +265,57 @@ public void testExtractionGivenCancelHalfWay() throws IOException { ); timestamp += 1000L; } - response = createSearchResponse("time", histogramBuckets); - extractor.setNextResponse(response); + searchResponse = toActionFuture(createSearchResponse("time", histogramBuckets)); + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenReturn(searchResponse); extractor.cancel(); assertThat(extractor.hasNext(), is(false)); assertThat(extractor.isCancelled(), is(true)); - assertThat(capturedSearchRequests.size(), equalTo(1)); + verify(client).execute(eq(TransportSearchAction.TYPE), any()); } public void testExtractionGivenSearchResponseHasError() { - TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L); - extractor.setNextResponseToError(new SearchPhaseExecutionException("phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY)); + AggregationDataExtractor extractor = new AggregationDataExtractor(client, 
createContext(1000L, 2000L), timingStatsReporter); + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenThrow( + new SearchPhaseExecutionException("phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY) + ); assertThat(extractor.hasNext(), is(true)); expectThrows(SearchPhaseExecutionException.class, extractor::next); } + public void testGetSummary() { + AggregationDataExtractor extractor = new AggregationDataExtractor(client, createContext(1000L, 2300L), timingStatsReporter); + + ArgumentCaptor searchRequestCaptor = ArgumentCaptor.forClass(SearchRequest.class); + ActionFuture searchResponse = toActionFuture(createSummaryResponse(1001L, 2299L, 10L)); + when(client.execute(eq(TransportSearchAction.TYPE), searchRequestCaptor.capture())).thenReturn(searchResponse); + + DataExtractor.DataSummary summary = extractor.getSummary(); + assertThat(summary.earliestTime(), equalTo(1001L)); + assertThat(summary.latestTime(), equalTo(2299L)); + assertThat(summary.totalHits(), equalTo(10L)); + + String searchRequest = searchRequestCaptor.getValue().toString().replaceAll("\\s", ""); + assertThat(searchRequest, containsString("\"size\":0")); + assertThat( + searchRequest, + containsString( + "\"query\":{\"bool\":{\"filter\":[{\"match_all\":{\"boost\":1.0}}," + + "{\"range\":{\"time\":{\"gte\":1000,\"lt\":2300," + + "\"format\":\"epoch_millis\",\"boost\":1.0}}}]" + ) + ); + assertThat( + searchRequest, + containsString( + "\"aggregations\":{\"earliest_time\":{\"min\":{\"field\":\"time\"}}," + "\"latest_time\":{\"max\":{\"field\":\"time\"}}}}" + ) + ); + assertThat(searchRequest, not(containsString("\"track_total_hits\":false"))); + assertThat(searchRequest, not(containsString("\"sort\""))); + } + private AggregationDataExtractorContext createContext(long start, long end) { return new AggregationDataExtractorContext( jobId, @@ -311,7 +333,13 @@ private AggregationDataExtractorContext createContext(long start, long end) { ); } - @SuppressWarnings("unchecked") + private ActionFuture toActionFuture(T t) { + @SuppressWarnings("unchecked") + ActionFuture future = (ActionFuture) mock(ActionFuture.class); + when(future.actionGet()).thenReturn(t); + return future; + } + private SearchResponse createSearchResponse(String histogramName, List histogramBuckets) { InternalHistogram histogram = mock(InternalHistogram.class); when(histogram.getName()).thenReturn(histogramName); @@ -330,6 +358,17 @@ private SearchResponse createSearchResponse(InternalAggregations aggregations) { return searchResponse; } + private SearchResponse createSummaryResponse(long start, long end, long totalHits) { + SearchResponse searchResponse = mock(SearchResponse.class); + when(searchResponse.getHits()).thenReturn( + new SearchHits(SearchHits.EMPTY, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 1) + ); + when(searchResponse.getAggregations()).thenReturn( + InternalAggregations.from(List.of(new Min("earliest_time", start, null, null), new Max("latest_time", end, null, null))) + ); + return searchResponse; + } + private static String asString(InputStream inputStream) throws IOException { try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8))) { return reader.lines().collect(Collectors.joining("\n")); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorTests.java 
index 6cc432dd4831f..5b9370d53e26e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/CompositeAggregationDataExtractorTests.java @@ -6,13 +6,16 @@ */ package org.elasticsearch.xpack.ml.datafeed.extractor.aggregation; -import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.QueryBuilder; @@ -26,12 +29,12 @@ import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.datafeed.SearchInterval; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; -import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter.DatafeedTimingStatsPersister; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; import org.junit.Before; +import org.mockito.ArgumentCaptor; import java.io.BufferedReader; import java.io.IOException; @@ -55,13 +58,16 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.stringContainsInOrder; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class CompositeAggregationDataExtractorTests extends ESTestCase { - private Client testClient; - private List capturedSearchRequests; + private Client client; private String jobId; private String timeField; private Set fields; @@ -72,37 +78,12 @@ public class CompositeAggregationDataExtractorTests extends ESTestCase { private AggregatedSearchRequestBuilder aggregatedSearchRequestBuilder; private Map runtimeMappings; - private class TestDataExtractor extends CompositeAggregationDataExtractor { - - private SearchResponse nextResponse; - private SearchPhaseExecutionException ex; - - TestDataExtractor(long start, long end) { - super(compositeAggregationBuilder, testClient, createContext(start, end), timingStatsReporter, aggregatedSearchRequestBuilder); - } - - @Override - protected SearchResponse executeSearchRequest(ActionRequestBuilder searchRequestBuilder) { - capturedSearchRequests.add(searchRequestBuilder.request()); - if (ex != null) { - throw ex; - } - return nextResponse; - } - - void setNextResponse(SearchResponse searchResponse) { - nextResponse = searchResponse; - } - - void setNextResponseToError(SearchPhaseExecutionException ex) { - this.ex = ex; - 
} - } - @Before public void setUpTests() { - testClient = mock(Client.class); - capturedSearchRequests = new ArrayList<>(); + client = mock(Client.class); + when(client.threadPool()).thenReturn(mock(ThreadPool.class)); + when(client.threadPool().getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + jobId = "test-job"; timeField = "time"; fields = new HashSet<>(); @@ -120,8 +101,8 @@ public void setUpTests() { .subAggregation(AggregationBuilders.max("time").field("time")) .subAggregation(AggregationBuilders.avg("responsetime").field("responsetime")); runtimeMappings = Collections.emptyMap(); - timingStatsReporter = new DatafeedTimingStatsReporter(new DatafeedTimingStats(jobId), mock(DatafeedTimingStatsPersister.class)); - aggregatedSearchRequestBuilder = (searchSourceBuilder) -> new SearchRequestBuilder(testClient).setSource(searchSourceBuilder) + timingStatsReporter = mock(DatafeedTimingStatsReporter.class); + aggregatedSearchRequestBuilder = (searchSourceBuilder) -> new SearchRequestBuilder(client).setSource(searchSourceBuilder) .setAllowPartialSearchResults(false) .setIndices(indices.toArray(String[]::new)); } @@ -159,10 +140,19 @@ public void testExtraction() throws IOException { ) ); - TestDataExtractor extractor = new TestDataExtractor(1000L, 4000L); + CompositeAggregationDataExtractor extractor = new CompositeAggregationDataExtractor( + compositeAggregationBuilder, + client, + createContext(1000L, 4000L), + timingStatsReporter, + aggregatedSearchRequestBuilder + ); - SearchResponse response = createSearchResponse("buckets", compositeBucket, Map.of("time_bucket", 4000L, "airline", "d")); - extractor.setNextResponse(response); + ArgumentCaptor searchRequestCaptor = ArgumentCaptor.forClass(SearchRequest.class); + ActionFuture searchResponse = toActionFuture( + createSearchResponse("buckets", compositeBucket, Map.of("time_bucket", 4000L, "airline", "d")) + ); + when(client.execute(eq(TransportSearchAction.TYPE), searchRequestCaptor.capture())).thenReturn(searchResponse); assertThat(extractor.hasNext(), is(true)); DataExtractor.Result result = extractor.next(); @@ -175,9 +165,8 @@ public void testExtraction() throws IOException { {"airline":"c","time":3999,"responsetime":31.0,"doc_count":4} \ {"airline":"b","time":3999,"responsetime":32.0,"doc_count":3}"""; assertThat(asString(stream.get()), equalTo(expectedStream)); - assertThat(capturedSearchRequests.size(), equalTo(1)); - String searchRequest = capturedSearchRequests.get(0).toString().replaceAll("\\s", ""); + String searchRequest = searchRequestCaptor.getValue().toString().replaceAll("\\s", ""); assertThat(searchRequest, containsString("\"size\":0")); assertThat( searchRequest, @@ -194,35 +183,57 @@ public void testExtraction() throws IOException { } public void testExtractionGivenResponseHasNullAggs() throws IOException { - TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L); + CompositeAggregationDataExtractor extractor = new CompositeAggregationDataExtractor( + compositeAggregationBuilder, + client, + createContext(1000L, 2000L), + timingStatsReporter, + aggregatedSearchRequestBuilder + ); - SearchResponse response = createSearchResponse(null); - extractor.setNextResponse(response); + ActionFuture searchResponse = toActionFuture(createSearchResponse(null)); + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenReturn(searchResponse); assertThat(extractor.hasNext(), is(true)); assertThat(extractor.next().data().isPresent(), is(false)); assertThat(extractor.hasNext(), is(false)); - 
assertThat(capturedSearchRequests.size(), equalTo(1)); + verify(client).execute(eq(TransportSearchAction.TYPE), any()); } public void testExtractionGivenResponseHasEmptyAggs() throws IOException { - TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L); + CompositeAggregationDataExtractor extractor = new CompositeAggregationDataExtractor( + compositeAggregationBuilder, + client, + createContext(1000L, 2000L), + timingStatsReporter, + aggregatedSearchRequestBuilder + ); + InternalAggregations emptyAggs = AggregationTestUtils.createAggs(Collections.emptyList()); - SearchResponse response = createSearchResponse(emptyAggs); - extractor.setNextResponse(response); + ActionFuture searchResponse = toActionFuture(createSearchResponse(emptyAggs)); + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenReturn(searchResponse); assertThat(extractor.hasNext(), is(true)); assertThat(extractor.next().data().isPresent(), is(false)); assertThat(extractor.hasNext(), is(false)); - assertThat(capturedSearchRequests.size(), equalTo(1)); + verify(client).execute(eq(TransportSearchAction.TYPE), any()); } public void testExtractionGivenCancelBeforeNext() { - TestDataExtractor extractor = new TestDataExtractor(1000L, 4000L); - SearchResponse response = createSearchResponse("time", Collections.emptyList(), Collections.emptyMap()); - extractor.setNextResponse(response); + CompositeAggregationDataExtractor extractor = new CompositeAggregationDataExtractor( + compositeAggregationBuilder, + client, + createContext(1000L, 4000L), + timingStatsReporter, + aggregatedSearchRequestBuilder + ); + + ActionFuture searchResponse = toActionFuture( + createSearchResponse("time", Collections.emptyList(), Collections.emptyMap()) + ); + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenReturn(searchResponse); extractor.cancel(); // Composite aggs should be true because we need to make sure the first search has occurred or not @@ -245,10 +256,19 @@ public void testExtractionCancelOnFirstPage() throws IOException { ); } - TestDataExtractor extractor = new TestDataExtractor(1000L, timestamp + 1000 + 1); + CompositeAggregationDataExtractor extractor = new CompositeAggregationDataExtractor( + compositeAggregationBuilder, + client, + createContext(1000L, timestamp + 1000 + 1), + timingStatsReporter, + aggregatedSearchRequestBuilder + ); + + ActionFuture searchResponse = toActionFuture( + createSearchResponse("buckets", buckets, Map.of("time_bucket", 1000L, "airline", "d")) + ); + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenReturn(searchResponse); - SearchResponse response = createSearchResponse("buckets", buckets, Map.of("time_bucket", 1000L, "airline", "d")); - extractor.setNextResponse(response); extractor.cancel(); // We should have next right now as we have not yet determined if we have handled a page or not assertThat(extractor.hasNext(), is(true)); @@ -274,10 +294,18 @@ public void testExtractionGivenCancelHalfWay() throws IOException { ); } - TestDataExtractor extractor = new TestDataExtractor(1000L, timestamp + 1000 + 1); + CompositeAggregationDataExtractor extractor = new CompositeAggregationDataExtractor( + compositeAggregationBuilder, + client, + createContext(1000L, timestamp + 1000 + 1), + timingStatsReporter, + aggregatedSearchRequestBuilder + ); - SearchResponse response = createSearchResponse("buckets", buckets, Map.of("time_bucket", 1000L, "airline", "d")); - extractor.setNextResponse(response); + ActionFuture searchResponse = toActionFuture( + 
createSearchResponse("buckets", buckets, Map.of("time_bucket", 1000L, "airline", "d")) + ); + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenReturn(searchResponse); assertThat(extractor.hasNext(), is(true)); assertThat(countMatches('{', asString(extractor.next().data().get())), equalTo(10L)); @@ -305,8 +333,10 @@ public void testExtractionGivenCancelHalfWay() throws IOException { ) ); } - response = createSearchResponse("buckets", buckets, Map.of("time_bucket", 3000L, "airline", "a")); - extractor.setNextResponse(response); + + searchResponse = toActionFuture(createSearchResponse("buckets", buckets, Map.of("time_bucket", 3000L, "airline", "a"))); + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenReturn(searchResponse); + extractor.cancel(); assertThat(extractor.hasNext(), is(true)); assertThat(extractor.isCancelled(), is(true)); @@ -315,12 +345,22 @@ public void testExtractionGivenCancelHalfWay() throws IOException { // Once we have handled the 6 remaining in that time bucket, we shouldn't finish the page and the extractor should end assertThat(extractor.hasNext(), is(false)); - assertThat(capturedSearchRequests.size(), equalTo(2)); + + verify(client, times(2)).execute(eq(TransportSearchAction.TYPE), any()); } public void testExtractionGivenSearchResponseHasError() { - TestDataExtractor extractor = new TestDataExtractor(1000L, 2000L); - extractor.setNextResponseToError(new SearchPhaseExecutionException("phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY)); + CompositeAggregationDataExtractor extractor = new CompositeAggregationDataExtractor( + compositeAggregationBuilder, + client, + createContext(1000L, 2000L), + timingStatsReporter, + aggregatedSearchRequestBuilder + ); + + when(client.execute(eq(TransportSearchAction.TYPE), any())).thenThrow( + new SearchPhaseExecutionException("phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY) + ); assertThat(extractor.hasNext(), is(true)); expectThrows(SearchPhaseExecutionException.class, extractor::next); @@ -344,7 +384,13 @@ private CompositeAggregationDataExtractorContext createContext(long start, long ); } - @SuppressWarnings("unchecked") + private ActionFuture toActionFuture(T t) { + @SuppressWarnings("unchecked") + ActionFuture future = (ActionFuture) mock(ActionFuture.class); + when(future.actionGet()).thenReturn(t); + return future; + } + private SearchResponse createSearchResponse( String aggName, List buckets, diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactoryTests.java index a7260d34a0136..878f49dbe77fe 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorFactoryTests.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.ml.datafeed.extractor.chunked; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -18,7 +17,6 @@ import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Job; -import 
org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; import org.junit.Before; @@ -31,9 +29,7 @@ public class ChunkedDataExtractorFactoryTests extends ESTestCase { - private Client client; private DataExtractorFactory dataExtractorFactory; - private DatafeedTimingStatsReporter timingStatsReporter; @Override protected NamedXContentRegistry xContentRegistry() { @@ -43,9 +39,7 @@ protected NamedXContentRegistry xContentRegistry() { @Before public void setUpMocks() { - client = mock(Client.class); dataExtractorFactory = mock(DataExtractorFactory.class); - timingStatsReporter = mock(DatafeedTimingStatsReporter.class); } public void testNewExtractor_GivenAlignedTimes() { @@ -53,8 +47,8 @@ public void testNewExtractor_GivenAlignedTimes() { ChunkedDataExtractor dataExtractor = (ChunkedDataExtractor) factory.newExtractor(2000, 5000); - assertThat(dataExtractor.getContext().start, equalTo(2000L)); - assertThat(dataExtractor.getContext().end, equalTo(5000L)); + assertThat(dataExtractor.getContext().start(), equalTo(2000L)); + assertThat(dataExtractor.getContext().end(), equalTo(5000L)); } public void testNewExtractor_GivenNonAlignedTimes() { @@ -62,8 +56,8 @@ public void testNewExtractor_GivenNonAlignedTimes() { ChunkedDataExtractor dataExtractor = (ChunkedDataExtractor) factory.newExtractor(3980, 9200); - assertThat(dataExtractor.getContext().start, equalTo(4000L)); - assertThat(dataExtractor.getContext().end, equalTo(9000L)); + assertThat(dataExtractor.getContext().start(), equalTo(4000L)); + assertThat(dataExtractor.getContext().end(), equalTo(9000L)); } public void testIntervalTimeAligner() { @@ -111,13 +105,10 @@ private ChunkedDataExtractorFactory createFactory(long histogramInterval) { datafeedConfigBuilder.setParsedAggregations(aggs); datafeedConfigBuilder.setIndices(Arrays.asList("my_index")); return new ChunkedDataExtractorFactory( - client, datafeedConfigBuilder.build(), - null, jobBuilder.build(new Date()), xContentRegistry(), - dataExtractorFactory, - timingStatsReporter + dataExtractorFactory ); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java index 7c8d2572461d4..ce6cf92d4bd51 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java @@ -6,29 +6,13 @@ */ package org.elasticsearch.xpack.ml.datafeed.extractor.chunked; -import org.apache.lucene.search.TotalHits; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.InternalAggregations; -import 
org.elasticsearch.search.aggregations.metrics.Max; -import org.elasticsearch.search.aggregations.metrics.Min; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.datafeed.DatafeedTimingStats; import org.elasticsearch.xpack.core.ml.datafeed.SearchInterval; -import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; -import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter.DatafeedTimingStatsPersister; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor.DataSummary; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; import org.junit.Before; import org.mockito.Mockito; @@ -36,100 +20,62 @@ import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Optional; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class ChunkedDataExtractorTests extends ESTestCase { - private Client client; - private List capturedSearchRequests; private String jobId; - private String timeField; - private List indices; - private QueryBuilder query; private int scrollSize; private TimeValue chunkSpan; private DataExtractorFactory dataExtractorFactory; - private DatafeedTimingStatsReporter timingStatsReporter; - - private class TestDataExtractor extends ChunkedDataExtractor { - - private SearchResponse nextResponse; - private SearchPhaseExecutionException ex; - - TestDataExtractor(long start, long end) { - super(client, dataExtractorFactory, createContext(start, end), timingStatsReporter); - } - - TestDataExtractor(long start, long end, boolean hasAggregations, Long histogramInterval) { - super(client, dataExtractorFactory, createContext(start, end, hasAggregations, histogramInterval), timingStatsReporter); - } - - @Override - protected SearchResponse executeSearchRequest(ActionRequestBuilder searchRequestBuilder) { - capturedSearchRequests.add(searchRequestBuilder.request()); - if (ex != null) { - throw ex; - } - return nextResponse; - } - - void setNextResponse(SearchResponse searchResponse) { - nextResponse = searchResponse; - } - - void setNextResponseToError(SearchPhaseExecutionException ex) { - this.ex = ex; - } - } @Before public void setUpTests() { - client = mock(Client.class); - capturedSearchRequests = new ArrayList<>(); jobId = "test-job"; - timeField = "time"; - indices = Arrays.asList("index-1", "index-2"); scrollSize = 1000; chunkSpan = null; dataExtractorFactory = mock(DataExtractorFactory.class); - timingStatsReporter = new DatafeedTimingStatsReporter(new DatafeedTimingStats(jobId), mock(DatafeedTimingStatsPersister.class)); } public void testExtractionGivenNoData() throws IOException { - TestDataExtractor extractor = new TestDataExtractor(1000L, 2300L); - extractor.setNextResponse(createSearchResponse(0L, 0L, 0L)); + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(1000L, 2300L)); + + DataExtractor summaryExtractor = new StubSubExtractor(new SearchInterval(1000L, 2300L), new DataSummary(null, null, 0L)); + when(dataExtractorFactory.newExtractor(1000L, 2300L)).thenReturn(summaryExtractor); 
assertThat(extractor.hasNext(), is(true)); assertThat(extractor.next().data().isPresent(), is(false)); assertThat(extractor.hasNext(), is(false)); + + verify(dataExtractorFactory).newExtractor(1000L, 2300L); Mockito.verifyNoMoreInteractions(dataExtractorFactory); } public void testExtractionGivenSpecifiedChunk() throws IOException { chunkSpan = TimeValue.timeValueSeconds(1); - TestDataExtractor extractor = new TestDataExtractor(1000L, 2300L); - extractor.setNextResponse(createSearchResponse(10L, 1000L, 2200L)); + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(1000L, 2300L)); + + DataExtractor summaryExtractor = new StubSubExtractor(new SearchInterval(1000L, 2300L), new DataSummary(1000L, 2300L, 10L)); + when(dataExtractorFactory.newExtractor(1000L, 2300L)).thenReturn(summaryExtractor); InputStream inputStream1 = mock(InputStream.class); InputStream inputStream2 = mock(InputStream.class); InputStream inputStream3 = mock(InputStream.class); - DataExtractor subExtactor1 = new StubSubExtractor(new SearchInterval(1000L, 2000L), inputStream1, inputStream2); - when(dataExtractorFactory.newExtractor(1000L, 2000L)).thenReturn(subExtactor1); + DataExtractor subExtractor1 = new StubSubExtractor(new SearchInterval(1000L, 2000L), inputStream1, inputStream2); + when(dataExtractorFactory.newExtractor(1000L, 2000L)).thenReturn(subExtractor1); - DataExtractor subExtactor2 = new StubSubExtractor(new SearchInterval(2000L, 2300L), inputStream3); - when(dataExtractorFactory.newExtractor(2000L, 2300L)).thenReturn(subExtactor2); + DataExtractor subExtractor2 = new StubSubExtractor(new SearchInterval(2000L, 2300L), inputStream3); + when(dataExtractorFactory.newExtractor(2000L, 2300L)).thenReturn(subExtractor2); assertThat(extractor.hasNext(), is(true)); DataExtractor.Result result = extractor.next(); @@ -148,46 +94,31 @@ public void testExtractionGivenSpecifiedChunk() throws IOException { assertThat(result.searchInterval(), equalTo(new SearchInterval(2000L, 2300L))); assertThat(result.data().isPresent(), is(false)); + verify(dataExtractorFactory).newExtractor(1000L, 2300L); verify(dataExtractorFactory).newExtractor(1000L, 2000L); verify(dataExtractorFactory).newExtractor(2000L, 2300L); Mockito.verifyNoMoreInteractions(dataExtractorFactory); - - assertThat(capturedSearchRequests.size(), equalTo(1)); - String searchRequest = capturedSearchRequests.get(0).toString().replaceAll("\\s", ""); - assertThat(searchRequest, containsString("\"size\":0")); - assertThat( - searchRequest, - containsString( - "\"query\":{\"bool\":{\"filter\":[{\"match_all\":{\"boost\":1.0}}," - + "{\"range\":{\"time\":{\"gte\":1000,\"lt\":2300," - + "\"format\":\"epoch_millis\",\"boost\":1.0}}}]" - ) - ); - assertThat( - searchRequest, - containsString( - "\"aggregations\":{\"earliest_time\":{\"min\":{\"field\":\"time\"}}," + "\"latest_time\":{\"max\":{\"field\":\"time\"}}}}" - ) - ); - assertThat(searchRequest, not(containsString("\"track_total_hits\":false"))); - assertThat(searchRequest, not(containsString("\"sort\""))); } public void testExtractionGivenSpecifiedChunkAndAggs() throws IOException { chunkSpan = TimeValue.timeValueSeconds(1); - TestDataExtractor extractor = new TestDataExtractor(1000L, 2300L, true, 1000L); - // 0 hits with non-empty data is possible with rollups - extractor.setNextResponse(createSearchResponse(randomFrom(0L, 2L, 10000L), 1000L, 2200L)); + DataExtractor summaryExtractor = new StubSubExtractor( + new SearchInterval(1000L, 2300L), + new DataSummary(1000L, 2200L, 
randomFrom(0L, 2L, 10000L)) + ); + when(dataExtractorFactory.newExtractor(1000L, 2300L)).thenReturn(summaryExtractor); + + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(1000L, 2300L, true, 200L)); InputStream inputStream1 = mock(InputStream.class); InputStream inputStream2 = mock(InputStream.class); InputStream inputStream3 = mock(InputStream.class); - DataExtractor subExtactor1 = new StubSubExtractor(new SearchInterval(1000L, 2000L), inputStream1, inputStream2); - when(dataExtractorFactory.newExtractor(1000L, 2000L)).thenReturn(subExtactor1); + DataExtractor subExtractor1 = new StubSubExtractor(new SearchInterval(1000L, 2000L), inputStream1, inputStream2); + when(dataExtractorFactory.newExtractor(1000L, 2000L)).thenReturn(subExtractor1); - DataExtractor subExtactor2 = new StubSubExtractor(new SearchInterval(2000L, 2300L), inputStream3); - when(dataExtractorFactory.newExtractor(2000L, 2300L)).thenReturn(subExtactor2); + DataExtractor subExtractor2 = new StubSubExtractor(new SearchInterval(2000L, 2300L), inputStream3); + when(dataExtractorFactory.newExtractor(2000L, 2300L)).thenReturn(subExtractor2); assertThat(extractor.hasNext(), is(true)); DataExtractor.Result result = extractor.next(); @@ -206,47 +137,31 @@ public void testExtractionGivenSpecifiedChunkAndAggs() throws IOException { assertThat(result.searchInterval(), equalTo(new SearchInterval(2000L, 2300L))); assertThat(result.data().isPresent(), is(false)); + verify(dataExtractorFactory).newExtractor(1000L, 2300L); verify(dataExtractorFactory).newExtractor(1000L, 2000L); verify(dataExtractorFactory).newExtractor(2000L, 2300L); Mockito.verifyNoMoreInteractions(dataExtractorFactory); - - assertThat(capturedSearchRequests.size(), equalTo(1)); - String searchRequest = capturedSearchRequests.get(0).toString().replaceAll("\\s", ""); - assertThat(searchRequest, containsString("\"size\":0")); - assertThat( - searchRequest, - containsString( - "\"query\":{\"bool\":{\"filter\":[{\"match_all\":{\"boost\":1.0}}," - + "{\"range\":{\"time\":{\"gte\":1000,\"lt\":2300," - + "\"format\":\"epoch_millis\",\"boost\":1.0}}}]" - ) - ); - assertThat( - searchRequest, - containsString( - "\"aggregations\":{\"earliest_time\":{\"min\":{\"field\":\"time\"}}," + "\"latest_time\":{\"max\":{\"field\":\"time\"}}}}" - ) - ); - assertThat(searchRequest, not(containsString("\"track_total_hits\":false"))); - assertThat(searchRequest, not(containsString("\"sort\""))); } public void testExtractionGivenAutoChunkAndAggs() throws IOException { chunkSpan = null; - TestDataExtractor extractor = new TestDataExtractor(100_000L, 450_000L, true, 200L); + DataExtractor summaryExtractor = new StubSubExtractor( + new SearchInterval(100_000L, 450_000L), + new DataSummary(100_000L, 400_000L, randomFrom(0L, 2L, 10000L)) + ); + when(dataExtractorFactory.newExtractor(100_000L, 450_000L)).thenReturn(summaryExtractor); - // 0 hits with non-empty data is possible with rollups - extractor.setNextResponse(createSearchResponse(randomFrom(0L, 2L, 10000L), 100_000L, 400_000L)); + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(100_000L, 450_000L, true, 200L)); InputStream inputStream1 = mock(InputStream.class); InputStream inputStream2 = mock(InputStream.class); // 200 * 1_000 == 200_000 - DataExtractor subExtactor1 = new StubSubExtractor(new SearchInterval(100_000L, 300_000L), inputStream1); - when(dataExtractorFactory.newExtractor(100_000L, 300_000L)).thenReturn(subExtactor1); + DataExtractor subExtractor1 = new 
StubSubExtractor(new SearchInterval(100_000L, 300_000L), inputStream1); + when(dataExtractorFactory.newExtractor(100_000L, 300_000L)).thenReturn(subExtractor1); - DataExtractor subExtactor2 = new StubSubExtractor(new SearchInterval(300_000L, 450_000L), inputStream2); - when(dataExtractorFactory.newExtractor(300_000L, 450_000L)).thenReturn(subExtactor2); + DataExtractor subExtractor2 = new StubSubExtractor(new SearchInterval(300_000L, 450_000L), inputStream2); + when(dataExtractorFactory.newExtractor(300_000L, 450_000L)).thenReturn(subExtractor2); assertThat(extractor.hasNext(), is(true)); DataExtractor.Result result = extractor.next(); @@ -261,43 +176,47 @@ public void testExtractionGivenAutoChunkAndAggs() throws IOException { assertThat(result.data().isPresent(), is(false)); assertThat(extractor.hasNext(), is(false)); + verify(dataExtractorFactory).newExtractor(100_000L, 450_000L); verify(dataExtractorFactory).newExtractor(100_000L, 300_000L); verify(dataExtractorFactory).newExtractor(300_000L, 450_000L); Mockito.verifyNoMoreInteractions(dataExtractorFactory); - - assertThat(capturedSearchRequests.size(), equalTo(1)); } public void testExtractionGivenAutoChunkAndAggsAndNoData() throws IOException { chunkSpan = null; - TestDataExtractor extractor = new TestDataExtractor(100L, 500L, true, 200L); + DataExtractor summaryExtractor = new StubSubExtractor(new SearchInterval(100L, 500L), new DataSummary(null, null, 0L)); + when(dataExtractorFactory.newExtractor(100L, 500L)).thenReturn(summaryExtractor); - extractor.setNextResponse(createNullSearchResponse()); + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(100L, 500L, true, 200L)); assertThat(extractor.next().data().isPresent(), is(false)); assertThat(extractor.hasNext(), is(false)); + verify(dataExtractorFactory).newExtractor(100L, 500L); Mockito.verifyNoMoreInteractions(dataExtractorFactory); - - assertThat(capturedSearchRequests.size(), equalTo(1)); } public void testExtractionGivenAutoChunkAndScrollSize1000() throws IOException { chunkSpan = null; scrollSize = 1000; - TestDataExtractor extractor = new TestDataExtractor(100000L, 450000L); // 300K millis * 1000 * 10 / 15K docs = 200000 - extractor.setNextResponse(createSearchResponse(15000L, 100000L, 400000L)); + DataExtractor summaryExtractor = new StubSubExtractor( + new SearchInterval(100000L, 450000L), + new DataSummary(100000L, 400000L, 15000L) + ); + when(dataExtractorFactory.newExtractor(100000L, 450000L)).thenReturn(summaryExtractor); + + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(100000L, 450000L)); InputStream inputStream1 = mock(InputStream.class); InputStream inputStream2 = mock(InputStream.class); - DataExtractor subExtactor1 = new StubSubExtractor(new SearchInterval(100_000L, 300_000L), inputStream1); - when(dataExtractorFactory.newExtractor(100_000L, 300_000L)).thenReturn(subExtactor1); + DataExtractor subExtractor1 = new StubSubExtractor(new SearchInterval(100_000L, 300_000L), inputStream1); + when(dataExtractorFactory.newExtractor(100_000L, 300_000L)).thenReturn(subExtractor1); - DataExtractor subExtactor2 = new StubSubExtractor(new SearchInterval(300_000L, 450_000L), inputStream2); - when(dataExtractorFactory.newExtractor(300_000L, 450_000L)).thenReturn(subExtactor2); + DataExtractor subExtractor2 = new StubSubExtractor(new SearchInterval(300_000L, 450_000L), inputStream2); + when(dataExtractorFactory.newExtractor(300_000L, 450_000L)).thenReturn(subExtractor2); assertThat(extractor.hasNext(), 
is(true)); assertEquals(inputStream1, extractor.next().data().get()); @@ -306,29 +225,31 @@ public void testExtractionGivenAutoChunkAndScrollSize1000() throws IOException { assertThat(extractor.next().data().isPresent(), is(false)); assertThat(extractor.hasNext(), is(false)); + verify(dataExtractorFactory).newExtractor(100000L, 450000L); verify(dataExtractorFactory).newExtractor(100000L, 300000L); verify(dataExtractorFactory).newExtractor(300000L, 450000L); Mockito.verifyNoMoreInteractions(dataExtractorFactory); - - assertThat(capturedSearchRequests.size(), equalTo(1)); } public void testExtractionGivenAutoChunkAndScrollSize500() throws IOException { chunkSpan = null; scrollSize = 500; - TestDataExtractor extractor = new TestDataExtractor(100000L, 450000L); + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(100000L, 450000L)); - // 300K millis * 500 * 10 / 15K docs = 100000 - extractor.setNextResponse(createSearchResponse(15000L, 100000L, 400000L)); + DataExtractor summaryExtractor = new StubSubExtractor( + new SearchInterval(100000L, 450000L), + new DataSummary(100000L, 400000L, 15000L) + ); + when(dataExtractorFactory.newExtractor(100000L, 450000L)).thenReturn(summaryExtractor); InputStream inputStream1 = mock(InputStream.class); InputStream inputStream2 = mock(InputStream.class); - DataExtractor subExtactor1 = new StubSubExtractor(new SearchInterval(100_000L, 200_000L), inputStream1); - when(dataExtractorFactory.newExtractor(100000L, 200000L)).thenReturn(subExtactor1); + DataExtractor subExtractor1 = new StubSubExtractor(new SearchInterval(100_000L, 200_000L), inputStream1); + when(dataExtractorFactory.newExtractor(100000L, 200000L)).thenReturn(subExtractor1); - DataExtractor subExtactor2 = new StubSubExtractor(new SearchInterval(200_000L, 300_000L), inputStream2); - when(dataExtractorFactory.newExtractor(200000L, 300000L)).thenReturn(subExtactor2); + DataExtractor subExtractor2 = new StubSubExtractor(new SearchInterval(200_000L, 300_000L), inputStream2); + when(dataExtractorFactory.newExtractor(200000L, 300000L)).thenReturn(subExtractor2); assertThat(extractor.hasNext(), is(true)); assertEquals(inputStream1, extractor.next().data().get()); @@ -336,28 +257,31 @@ public void testExtractionGivenAutoChunkAndScrollSize500() throws IOException { assertEquals(inputStream2, extractor.next().data().get()); assertThat(extractor.hasNext(), is(true)); + verify(dataExtractorFactory).newExtractor(100000L, 450000L); verify(dataExtractorFactory).newExtractor(100000L, 200000L); verify(dataExtractorFactory).newExtractor(200000L, 300000L); - - assertThat(capturedSearchRequests.size(), equalTo(1)); } public void testExtractionGivenAutoChunkIsLessThanMinChunk() throws IOException { chunkSpan = null; scrollSize = 1000; - TestDataExtractor extractor = new TestDataExtractor(100000L, 450000L); + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(100000L, 450000L)); // 30K millis * 1000 * 10 / 150K docs = 2000 < min of 60K - extractor.setNextResponse(createSearchResponse(150000L, 100000L, 400000L)); + DataExtractor summaryExtractor = new StubSubExtractor( + new SearchInterval(100000L, 450000L), + new DataSummary(100000L, 400000L, 150000L) + ); + when(dataExtractorFactory.newExtractor(100000L, 450000L)).thenReturn(summaryExtractor); InputStream inputStream1 = mock(InputStream.class); InputStream inputStream2 = mock(InputStream.class); - DataExtractor subExtactor1 = new StubSubExtractor(new SearchInterval(100_000L, 160_000L), inputStream1); 
- when(dataExtractorFactory.newExtractor(100000L, 160000L)).thenReturn(subExtactor1); + DataExtractor subExtractor1 = new StubSubExtractor(new SearchInterval(100_000L, 160_000L), inputStream1); + when(dataExtractorFactory.newExtractor(100000L, 160000L)).thenReturn(subExtractor1); - DataExtractor subExtactor2 = new StubSubExtractor(new SearchInterval(160_000L, 220_000L), inputStream2); - when(dataExtractorFactory.newExtractor(160000L, 220000L)).thenReturn(subExtactor2); + DataExtractor subExtractor2 = new StubSubExtractor(new SearchInterval(160_000L, 220_000L), inputStream2); + when(dataExtractorFactory.newExtractor(160000L, 220000L)).thenReturn(subExtractor2); assertThat(extractor.hasNext(), is(true)); assertEquals(inputStream1, extractor.next().data().get()); @@ -365,24 +289,24 @@ public void testExtractionGivenAutoChunkIsLessThanMinChunk() throws IOException assertEquals(inputStream2, extractor.next().data().get()); assertThat(extractor.hasNext(), is(true)); + verify(dataExtractorFactory).newExtractor(100000L, 450000L); verify(dataExtractorFactory).newExtractor(100000L, 160000L); verify(dataExtractorFactory).newExtractor(160000L, 220000L); Mockito.verifyNoMoreInteractions(dataExtractorFactory); - - assertThat(capturedSearchRequests.size(), equalTo(1)); } public void testExtractionGivenAutoChunkAndDataTimeSpreadIsZero() throws IOException { chunkSpan = null; scrollSize = 1000; - TestDataExtractor extractor = new TestDataExtractor(100L, 500L); + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(100L, 500L)); - extractor.setNextResponse(createSearchResponse(150000L, 300L, 300L)); + DataExtractor summaryExtractor = new StubSubExtractor(new SearchInterval(100L, 500L), new DataSummary(300L, 300L, 150000L)); + when(dataExtractorFactory.newExtractor(100L, 500L)).thenReturn(summaryExtractor); InputStream inputStream1 = mock(InputStream.class); - DataExtractor subExtactor1 = new StubSubExtractor(new SearchInterval(300L, 500L), inputStream1); - when(dataExtractorFactory.newExtractor(300L, 500L)).thenReturn(subExtactor1); + DataExtractor subExtractor1 = new StubSubExtractor(new SearchInterval(300L, 500L), inputStream1); + when(dataExtractorFactory.newExtractor(300L, 500L)).thenReturn(subExtractor1); assertThat(extractor.hasNext(), is(true)); assertEquals(inputStream1, extractor.next().data().get()); @@ -390,24 +314,20 @@ public void testExtractionGivenAutoChunkAndDataTimeSpreadIsZero() throws IOExcep assertThat(extractor.next().data().isPresent(), is(false)); assertThat(extractor.hasNext(), is(false)); + verify(dataExtractorFactory).newExtractor(100L, 500L); verify(dataExtractorFactory).newExtractor(300L, 500L); Mockito.verifyNoMoreInteractions(dataExtractorFactory); - - assertThat(capturedSearchRequests.size(), equalTo(1)); } public void testExtractionGivenAutoChunkAndTotalTimeRangeSmallerThanChunk() throws IOException { chunkSpan = null; scrollSize = 1000; - TestDataExtractor extractor = new TestDataExtractor(1L, 101L); + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(1L, 101L)); // 100 millis * 1000 * 10 / 10 docs = 100000 - extractor.setNextResponse(createSearchResponse(10L, 1L, 101L)); - InputStream inputStream1 = mock(InputStream.class); - - DataExtractor subExtactor1 = new StubSubExtractor(new SearchInterval(1L, 10L), inputStream1); - when(dataExtractorFactory.newExtractor(1L, 101L)).thenReturn(subExtactor1); + DataExtractor stubExtractor = new StubSubExtractor(new SearchInterval(1L, 101L), new DataSummary(1L, 101L, 
10L), inputStream1); + when(dataExtractorFactory.newExtractor(1L, 101L)).thenReturn(stubExtractor); assertThat(extractor.hasNext(), is(true)); assertEquals(inputStream1, extractor.next().data().get()); @@ -415,19 +335,21 @@ public void testExtractionGivenAutoChunkAndTotalTimeRangeSmallerThanChunk() thro assertThat(extractor.next().data().isPresent(), is(false)); assertThat(extractor.hasNext(), is(false)); - verify(dataExtractorFactory).newExtractor(1L, 101L); + verify(dataExtractorFactory, times(2)).newExtractor(1L, 101L); Mockito.verifyNoMoreInteractions(dataExtractorFactory); - - assertThat(capturedSearchRequests.size(), equalTo(1)); } public void testExtractionGivenAutoChunkAndIntermediateEmptySearchShouldReconfigure() throws IOException { chunkSpan = null; scrollSize = 500; - TestDataExtractor extractor = new TestDataExtractor(100000L, 400000L); + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(100000L, 400000L)); // 300K millis * 500 * 10 / 15K docs = 100000 - extractor.setNextResponse(createSearchResponse(15000L, 100000L, 400000L)); + DataExtractor summaryExtractor = new StubSubExtractor( + new SearchInterval(100000L, 400000L), + new DataSummary(100000L, 400000L, 15000L) + ); + when(dataExtractorFactory.newExtractor(100000L, 400000L)).thenReturn(summaryExtractor); InputStream inputStream1 = mock(InputStream.class); @@ -443,44 +365,30 @@ public void testExtractionGivenAutoChunkAndIntermediateEmptySearchShouldReconfig assertThat(extractor.hasNext(), is(true)); // Now we have: 200K millis * 500 * 10 / 5K docs = 200000 - extractor.setNextResponse(createSearchResponse(5000, 200000L, 400000L)); - - // This is the last one InputStream inputStream2 = mock(InputStream.class); - DataExtractor subExtractor3 = new StubSubExtractor(new SearchInterval(200_000L, 400_000L), inputStream2); - when(dataExtractorFactory.newExtractor(200000, 400000)).thenReturn(subExtractor3); + DataExtractor newExtractor = new StubSubExtractor( + new SearchInterval(300000L, 400000L), + new DataSummary(300000L, 400000L, 5000L), + inputStream2 + ); + when(dataExtractorFactory.newExtractor(300000L, 400000L)).thenReturn(newExtractor); assertEquals(inputStream2, extractor.next().data().get()); assertThat(extractor.next().data().isPresent(), is(false)); assertThat(extractor.hasNext(), is(false)); - verify(dataExtractorFactory).newExtractor(100000L, 200000L); - verify(dataExtractorFactory).newExtractor(200000L, 300000L); - verify(dataExtractorFactory).newExtractor(200000L, 400000L); + verify(dataExtractorFactory).newExtractor(100000L, 400000L); // Initial summary + verify(dataExtractorFactory).newExtractor(100000L, 200000L); // Chunk 1 + verify(dataExtractorFactory).newExtractor(200000L, 300000L); // Chunk 2 with no data + verify(dataExtractorFactory, times(2)).newExtractor(300000L, 400000L); // Reconfigure and new chunk Mockito.verifyNoMoreInteractions(dataExtractorFactory); - - assertThat(capturedSearchRequests.size(), equalTo(2)); - - String searchRequest = capturedSearchRequests.get(0).toString().replaceAll("\\s", ""); - assertThat(searchRequest, containsString("\"gte\":100000,\"lt\":400000")); - searchRequest = capturedSearchRequests.get(1).toString().replaceAll("\\s", ""); - assertThat(searchRequest, containsString("\"gte\":300000,\"lt\":400000")); } public void testCancelGivenNextWasNeverCalled() { chunkSpan = TimeValue.timeValueSeconds(1); - TestDataExtractor extractor = new TestDataExtractor(1000L, 2300L); - extractor.setNextResponse(createSearchResponse(10L, 1000L, 2200L)); - - 
InputStream inputStream1 = mock(InputStream.class); - - DataExtractor subExtactor1 = new StubSubExtractor(new SearchInterval(1000L, 2000L), inputStream1); - when(dataExtractorFactory.newExtractor(1000L, 2000L)).thenReturn(subExtactor1); - + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(1000L, 2300L)); assertThat(extractor.hasNext(), is(true)); - extractor.cancel(); - assertThat(extractor.isCancelled(), is(true)); assertThat(extractor.hasNext(), is(false)); Mockito.verifyNoMoreInteractions(dataExtractorFactory); @@ -488,14 +396,16 @@ public void testCancelGivenNextWasNeverCalled() { public void testCancelGivenCurrentSubExtractorHasMore() throws IOException { chunkSpan = TimeValue.timeValueSeconds(1); - TestDataExtractor extractor = new TestDataExtractor(1000L, 2300L); - extractor.setNextResponse(createSearchResponse(10L, 1000L, 2200L)); + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(1000L, 2300L)); + + DataExtractor summaryExtractor = new StubSubExtractor(new SearchInterval(1000L, 2300L), new DataSummary(1000L, 2200L, 10L)); + when(dataExtractorFactory.newExtractor(1000L, 2300L)).thenReturn(summaryExtractor); InputStream inputStream1 = mock(InputStream.class); InputStream inputStream2 = mock(InputStream.class); - DataExtractor subExtactor1 = new StubSubExtractor(new SearchInterval(1000L, 2000L), inputStream1, inputStream2); - when(dataExtractorFactory.newExtractor(1000L, 2000L)).thenReturn(subExtactor1); + DataExtractor subExtractor1 = new StubSubExtractor(new SearchInterval(1000L, 2000L), inputStream1, inputStream2); + when(dataExtractorFactory.newExtractor(1000L, 2000L)).thenReturn(subExtractor1); assertThat(extractor.hasNext(), is(true)); assertEquals(inputStream1, extractor.next().data().get()); @@ -509,19 +419,23 @@ public void testCancelGivenCurrentSubExtractorHasMore() throws IOException { assertThat(extractor.next().data().isPresent(), is(false)); assertThat(extractor.hasNext(), is(false)); + verify(dataExtractorFactory).newExtractor(1000L, 2300L); verify(dataExtractorFactory).newExtractor(1000L, 2000L); Mockito.verifyNoMoreInteractions(dataExtractorFactory); } public void testCancelGivenCurrentSubExtractorIsDone() throws IOException { chunkSpan = TimeValue.timeValueSeconds(1); - TestDataExtractor extractor = new TestDataExtractor(1000L, 2300L); - extractor.setNextResponse(createSearchResponse(10L, 1000L, 2200L)); + + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(1000L, 2300L)); + + DataExtractor summaryExtractor = new StubSubExtractor(new SearchInterval(1000L, 2300L), new DataSummary(1000L, 2200L, 10L)); + when(dataExtractorFactory.newExtractor(1000L, 2300L)).thenReturn(summaryExtractor); InputStream inputStream1 = mock(InputStream.class); - DataExtractor subExtactor1 = new StubSubExtractor(new SearchInterval(1000L, 3000L), inputStream1); - when(dataExtractorFactory.newExtractor(1000L, 2000L)).thenReturn(subExtactor1); + DataExtractor subExtractor1 = new StubSubExtractor(new SearchInterval(1000L, 3000L), inputStream1); + when(dataExtractorFactory.newExtractor(1000L, 2000L)).thenReturn(subExtractor1); assertThat(extractor.hasNext(), is(true)); assertEquals(inputStream1, extractor.next().data().get()); @@ -533,66 +447,27 @@ public void testCancelGivenCurrentSubExtractorIsDone() throws IOException { assertThat(extractor.next().data().isPresent(), is(false)); assertThat(extractor.hasNext(), is(false)); + verify(dataExtractorFactory).newExtractor(1000L, 2300L); 
verify(dataExtractorFactory).newExtractor(1000L, 2000L); Mockito.verifyNoMoreInteractions(dataExtractorFactory); } public void testDataSummaryRequestIsFailed() { chunkSpan = TimeValue.timeValueSeconds(2); - TestDataExtractor extractor = new TestDataExtractor(1000L, 2300L); - extractor.setNextResponseToError(new SearchPhaseExecutionException("search phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY)); + DataExtractor extractor = new ChunkedDataExtractor(dataExtractorFactory, createContext(1000L, 2300L)); + when(dataExtractorFactory.newExtractor(1000L, 2300L)).thenThrow( + new SearchPhaseExecutionException("search phase 1", "boom", ShardSearchFailure.EMPTY_ARRAY) + ); assertThat(extractor.hasNext(), is(true)); expectThrows(SearchPhaseExecutionException.class, extractor::next); } public void testNoDataSummaryHasNoData() { - ChunkedDataExtractor.DataSummary summary = ChunkedDataExtractor.AggregatedDataSummary.noDataSummary(randomNonNegativeLong()); + DataSummary summary = new DataSummary(null, null, 0L); assertFalse(summary.hasData()); } - private SearchResponse createSearchResponse(long totalHits, long earliestTime, long latestTime) { - SearchResponse searchResponse = mock(SearchResponse.class); - when(searchResponse.status()).thenReturn(RestStatus.OK); - SearchHit[] hits = new SearchHit[(int) totalHits]; - Arrays.fill(hits, SearchHit.unpooled(1)); - SearchHits searchHits = SearchHits.unpooled(hits, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 1); - when(searchResponse.getHits()).thenReturn(searchHits); - - List aggs = new ArrayList<>(); - Min min = mock(Min.class); - when(min.value()).thenReturn((double) earliestTime); - when(min.getName()).thenReturn("earliest_time"); - aggs.add(min); - Max max = mock(Max.class); - when(max.value()).thenReturn((double) latestTime); - when(max.getName()).thenReturn("latest_time"); - aggs.add(max); - InternalAggregations aggregations = InternalAggregations.from(aggs); - when(searchResponse.getAggregations()).thenReturn(aggregations); - return searchResponse; - } - - private SearchResponse createNullSearchResponse() { - SearchResponse searchResponse = mock(SearchResponse.class); - when(searchResponse.status()).thenReturn(RestStatus.OK); - SearchHits searchHits = SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1); - when(searchResponse.getHits()).thenReturn(searchHits); - - List aggs = new ArrayList<>(); - Min min = mock(Min.class); - when(min.value()).thenReturn(Double.POSITIVE_INFINITY); - when(min.getName()).thenReturn("earliest_time"); - aggs.add(min); - Max max = mock(Max.class); - when(max.value()).thenReturn(Double.POSITIVE_INFINITY); - when(max.getName()).thenReturn("latest_time"); - aggs.add(max); - InternalAggregations aggregations = InternalAggregations.from(aggs); - when(searchResponse.getAggregations()).thenReturn(aggregations); - return searchResponse; - } - private ChunkedDataExtractorContext createContext(long start, long end) { return createContext(start, end, false, null); } @@ -600,32 +475,38 @@ private ChunkedDataExtractorContext createContext(long start, long end) { private ChunkedDataExtractorContext createContext(long start, long end, boolean hasAggregations, Long histogramInterval) { return new ChunkedDataExtractorContext( jobId, - timeField, - indices, - QueryBuilders.matchAllQuery(), scrollSize, start, end, chunkSpan, ChunkedDataExtractorFactory.newIdentityTimeAligner(), - Collections.emptyMap(), hasAggregations, - histogramInterval, - SearchRequest.DEFAULT_INDICES_OPTIONS, - Collections.emptyMap() + 
histogramInterval ); } private static class StubSubExtractor implements DataExtractor { - final SearchInterval searchInterval; - List streams = new ArrayList<>(); - boolean hasNext = true; + + private final DataSummary summary; + private final SearchInterval searchInterval; + private final List streams = new ArrayList<>(); + private boolean hasNext = true; StubSubExtractor(SearchInterval searchInterval, InputStream... streams) { + this(searchInterval, null, streams); + } + + StubSubExtractor(SearchInterval searchInterval, DataSummary summary, InputStream... streams) { this.searchInterval = searchInterval; + this.summary = summary; Collections.addAll(this.streams, streams); } + @Override + public DataSummary getSummary() { + return summary; + } + @Override public boolean hasNext() { return hasNext; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java index f3eab09b7bc2e..d994b14265a26 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractorTests.java @@ -31,6 +31,9 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.Min; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -39,6 +42,7 @@ import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter; import org.elasticsearch.xpack.ml.datafeed.DatafeedTimingStatsReporter.DatafeedTimingStatsPersister; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor; +import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractor.DataSummary; import org.elasticsearch.xpack.ml.extractor.DocValueField; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import org.elasticsearch.xpack.ml.extractor.TimeField; @@ -65,6 +69,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; import static org.mockito.ArgumentMatchers.same; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -512,6 +517,37 @@ public void testDomainSplitScriptField() throws IOException { assertThat(capturedClearScrollIds.get(0), equalTo(response2.getScrollId())); } + public void testGetSummary() { + ScrollDataExtractorContext context = createContext(1000L, 2300L); + TestDataExtractor extractor = new TestDataExtractor(context); + extractor.setNextResponse(createSummaryResponse(1001L, 2299L, 10L)); + + DataSummary summary = extractor.getSummary(); + assertThat(summary.earliestTime(), equalTo(1001L)); + assertThat(summary.latestTime(), equalTo(2299L)); + assertThat(summary.totalHits(), equalTo(10L)); + + assertThat(capturedSearchRequests.size(), equalTo(1)); + String searchRequest = capturedSearchRequests.get(0).toString().replaceAll("\\s", ""); + assertThat(searchRequest, containsString("\"size\":0")); + assertThat( + searchRequest, + containsString( + 
"\"query\":{\"bool\":{\"filter\":[{\"match_all\":{\"boost\":1.0}}," + + "{\"range\":{\"time\":{\"gte\":1000,\"lt\":2300," + + "\"format\":\"epoch_millis\",\"boost\":1.0}}}]" + ) + ); + assertThat( + searchRequest, + containsString( + "\"aggregations\":{\"earliest_time\":{\"min\":{\"field\":\"time\"}}," + "\"latest_time\":{\"max\":{\"field\":\"time\"}}}}" + ) + ); + assertThat(searchRequest, not(containsString("\"track_total_hits\":false"))); + assertThat(searchRequest, not(containsString("\"sort\""))); + } + private ScrollDataExtractorContext createContext(long start, long end) { return new ScrollDataExtractorContext( jobId, @@ -553,6 +589,17 @@ private SearchResponse createSearchResponse(List timestamps, List return searchResponse; } + private SearchResponse createSummaryResponse(long start, long end, long totalHits) { + SearchResponse searchResponse = mock(SearchResponse.class); + when(searchResponse.getHits()).thenReturn( + new SearchHits(SearchHits.EMPTY, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 1) + ); + when(searchResponse.getAggregations()).thenReturn( + InternalAggregations.from(List.of(new Min("earliest_time", start, null, null), new Max("latest_time", end, null, null))) + ); + return searchResponse; + } + private List getCapturedClearScrollIds() { return capturedClearScrollRequests.getAllValues().stream().map(r -> r.getScrollIds().get(0)).collect(Collectors.toList()); } From 71c3f34ce5c50cf04c35c1a65a4c15e78672949e Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 5 Feb 2024 10:19:53 +0100 Subject: [PATCH 012/106] Speedup slicing from ReleasableBytesReference some more (#105108) We can speed up the slice operation quite a bit by speeding up skip for the common case and passing the delegete as the basis for the stream (this neatly avoids a multi-morphic call to `length` on the bytes reference). Also, while we're at it, we can speed up the common-case read operation the same way. 
--- .../common/bytes/BytesReferenceStreamInput.java | 16 ++++++++++++++++ .../common/bytes/ReleasableBytesReference.java | 2 +- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/common/bytes/BytesReferenceStreamInput.java b/server/src/main/java/org/elasticsearch/common/bytes/BytesReferenceStreamInput.java index 2fca882724bbd..1e30579292d00 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/BytesReferenceStreamInput.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/BytesReferenceStreamInput.java @@ -143,6 +143,14 @@ public int read() throws IOException { @Override public int read(final byte[] b, final int bOffset, final int len) throws IOException { + if (slice.remaining() >= len) { + slice.get(b, bOffset, len); + return len; + } + return readFromMultipleSlices(b, bOffset, len); + } + + private int readFromMultipleSlices(byte[] b, int bOffset, int len) throws IOException { final int length = bytesReference.length(); final int offset = offset(); if (offset >= length) { @@ -186,6 +194,14 @@ public long skip(long n) throws IOException { if (n <= 0L) { return 0L; } + if (n <= slice.remaining()) { + slice.position(slice.position() + (int) n); + return n; + } + return skipMultiple(n); + } + + private int skipMultiple(long n) throws IOException { assert offset() <= bytesReference.length() : offset() + " vs " + bytesReference.length(); // definitely >= 0 and <= Integer.MAX_VALUE so casting is ok final int numBytesSkipped = (int) Math.min(n, bytesReference.length() - offset()); diff --git a/server/src/main/java/org/elasticsearch/common/bytes/ReleasableBytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/ReleasableBytesReference.java index 567f39d968200..e9fe63529e17a 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/ReleasableBytesReference.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/ReleasableBytesReference.java @@ -144,7 +144,7 @@ public long ramBytesUsed() { @Override public StreamInput streamInput() throws IOException { assert hasReferences(); - return new BytesReferenceStreamInput(this) { + return new BytesReferenceStreamInput(delegate) { private ReleasableBytesReference retainAndSkip(int len) throws IOException { if (len == 0) { return ReleasableBytesReference.empty(); From 6a40c04cc1430c918372cbd8aba327ee2b1ed6ce Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 5 Feb 2024 10:04:24 +0000 Subject: [PATCH 013/106] More guidance in balance settings docs (#105119) Today the docs on balancing settings describe what the settings all do but offer little guidance about how to configure them. This commit adds some extra detail to avoid some common misunderstandings and reorders the docs a little so that more commonly-adjusted settings are mentioned earlier. --- .../cluster/shards_allocation.asciidoc | 127 +++++++++++------- .../modules/indices/recovery.asciidoc | 5 +- 2 files changed, 84 insertions(+), 48 deletions(-) diff --git a/docs/reference/modules/cluster/shards_allocation.asciidoc b/docs/reference/modules/cluster/shards_allocation.asciidoc index 5a7aa43155c66..a73a3906bd3fd 100644 --- a/docs/reference/modules/cluster/shards_allocation.asciidoc +++ b/docs/reference/modules/cluster/shards_allocation.asciidoc @@ -22,37 +22,54 @@ one of the active allocation ids in the cluster state. 
-- +[[cluster-routing-allocation-same-shard-host]] +`cluster.routing.allocation.same_shard.host`:: + (<>) + If `true`, forbids multiple copies of a shard from being allocated to + distinct nodes on the same host, i.e. which have the same network + address. Defaults to `false`, meaning that copies of a shard may + sometimes be allocated to nodes on the same host. This setting is only + relevant if you run multiple nodes on each host. + `cluster.routing.allocation.node_concurrent_incoming_recoveries`:: (<>) - How many concurrent incoming shard recoveries are allowed to happen on a node. Incoming recoveries are the recoveries - where the target shard (most likely the replica unless a shard is relocating) is allocated on the node. Defaults to `2`. + How many concurrent incoming shard recoveries are allowed to happen on a + node. Incoming recoveries are the recoveries where the target shard (most + likely the replica unless a shard is relocating) is allocated on the node. + Defaults to `2`. Increasing this setting may cause shard movements to have + a performance impact on other activity in your cluster, but may not make + shard movements complete noticeably sooner. We do not recommend adjusting + this setting from its default of `2`. `cluster.routing.allocation.node_concurrent_outgoing_recoveries`:: (<>) - How many concurrent outgoing shard recoveries are allowed to happen on a node. Outgoing recoveries are the recoveries - where the source shard (most likely the primary unless a shard is relocating) is allocated on the node. Defaults to `2`. + How many concurrent outgoing shard recoveries are allowed to happen on a + node. Outgoing recoveries are the recoveries where the source shard (most + likely the primary unless a shard is relocating) is allocated on the node. + Defaults to `2`. Increasing this setting may cause shard movements to have + a performance impact on other activity in your cluster, but may not make + shard movements complete noticeably sooner. We do not recommend adjusting + this setting from its default of `2`. `cluster.routing.allocation.node_concurrent_recoveries`:: (<>) - A shortcut to set both `cluster.routing.allocation.node_concurrent_incoming_recoveries` and - `cluster.routing.allocation.node_concurrent_outgoing_recoveries`. Defaults to 2. - + A shortcut to set both + `cluster.routing.allocation.node_concurrent_incoming_recoveries` and + `cluster.routing.allocation.node_concurrent_outgoing_recoveries`. Defaults + to `2`. Increasing this setting may cause shard movements to have a + performance impact on other activity in your cluster, but may not make + shard movements complete noticeably sooner. We do not recommend adjusting + this setting from its default of `2`. `cluster.routing.allocation.node_initial_primaries_recoveries`:: - (<>) - While the recovery of replicas happens over the network, the recovery of - an unassigned primary after node restart uses data from the local disk. - These should be fast so more initial primary recoveries can happen in - parallel on the same node. Defaults to `4`. - -[[cluster-routing-allocation-same-shard-host]] -`cluster.routing.allocation.same_shard.host`:: - (<>) - If `true`, forbids multiple copies of a shard from being allocated to - distinct nodes on the same host, i.e. which have the same network - address. Defaults to `false`, meaning that copies of a shard may - sometimes be allocated to nodes on the same host. This setting is only - relevant if you run multiple nodes on each host. 
+ (<>) + While the recovery of replicas happens over the network, the recovery of + an unassigned primary after node restart uses data from the local disk. + These should be fast so more initial primary recoveries can happen in + parallel on each node. Defaults to `4`. Increasing this setting may cause + shard recoveries to have a performance impact on other activity in your + cluster, but may not make shard recoveries complete noticeably sooner. We + do not recommend adjusting this setting from its default of `4`. [[shards-rebalancing-settings]] ==== Shard rebalancing settings @@ -73,38 +90,44 @@ balancer works independently within each tier. You can use the following settings to control the rebalancing of shards across the cluster: -`cluster.routing.rebalance.enable`:: +`cluster.routing.allocation.allow_rebalance`:: + -- (<>) -Enable or disable rebalancing for specific kinds of shards: +Specify when shard rebalancing is allowed: -* `all` - (default) Allows shard balancing for all kinds of shards. -* `primaries` - Allows shard balancing only for primary shards. -* `replicas` - Allows shard balancing only for replica shards. -* `none` - No shard balancing of any kind are allowed for any indices. + +* `always` - Always allow rebalancing. +* `indices_primaries_active` - Only when all primaries in the cluster are allocated. +* `indices_all_active` - (default) Only when all shards (primaries and replicas) in the cluster are allocated. -- -`cluster.routing.allocation.allow_rebalance`:: +`cluster.routing.rebalance.enable`:: + -- (<>) -Specify when shard rebalancing is allowed: +Enable or disable rebalancing for specific kinds of shards: +* `all` - (default) Allows shard balancing for all kinds of shards. +* `primaries` - Allows shard balancing only for primary shards. +* `replicas` - Allows shard balancing only for replica shards. +* `none` - No shard balancing of any kind are allowed for any indices. -* `always` - Always allow rebalancing. -* `indices_primaries_active` - Only when all primaries in the cluster are allocated. -* `indices_all_active` - (default) Only when all shards (primaries and replicas) in the cluster are allocated. +Rebalancing is important to ensure the cluster returns to a healthy and fully +resilient state after a disruption. If you adjust this setting, remember to set +it back to `all` as soon as possible. -- `cluster.routing.allocation.cluster_concurrent_rebalance`:: (<>) Defines the number of concurrent shard rebalances are allowed across the whole cluster. Defaults to `2`. Note that this setting only controls the number of -concurrent shard relocations due to imbalances in the cluster. This setting does -not limit shard relocations due to +concurrent shard relocations due to imbalances in the cluster. This setting +does not limit shard relocations due to <> or -<>. +<>. Increasing this setting may cause the +cluster to use additional resources moving shards between nodes, so we +generally do not recommend adjusting this setting from its default of `2`. `cluster.routing.allocation.type`:: + @@ -149,6 +172,12 @@ data stream have an estimated write load of zero. The following settings control how {es} combines these values into an overall measure of each node's weight. +`cluster.routing.allocation.balance.threshold`:: +(float, <>) +The minimum improvement in weight which triggers a rebalancing shard movement. +Defaults to `1.0f`. Raising this value will cause {es} to stop rebalancing +shards sooner, leaving the cluster in a more unbalanced state. 
+ `cluster.routing.allocation.balance.shard`:: (float, <>) Defines the weight factor for the total number of shards allocated to each node. @@ -177,19 +206,25 @@ estimated number of indexing threads needed by the shard. Defaults to `10.0f`. Raising this value increases the tendency of {es} to equalize the total write load across nodes ahead of the other balancing variables. -`cluster.routing.allocation.balance.threshold`:: -(float, <>) -The minimum improvement in weight which triggers a rebalancing shard movement. -Defaults to `1.0f`. Raising this value will cause {es} to stop rebalancing -shards sooner, leaving the cluster in a more unbalanced state. - [NOTE] ==== -* It is not recommended to adjust the values of the heuristics settings. The -default values are generally good, and although different values may improve -the current balance, it is possible that they create problems in the future -if the cluster or workload changes. +* If you have a large cluster, it may be unnecessary to keep it in +a perfectly balanced state at all times. It is less resource-intensive for the +cluster to operate in a somewhat unbalanced state rather than to perform all +the shard movements needed to achieve the perfect balance. If so, increase the +value of `cluster.routing.allocation.balance.threshold` to define the +acceptable imbalance between nodes. For instance, if you have an average of 500 +shards per node and can accept a difference of 5% (25 typical shards) between +nodes, set `cluster.routing.allocation.balance.threshold` to `25`. + +* We do not recommend adjusting the values of the heuristic weight factor +settings. The default values work well in all reasonable clusters. Although +different values may improve the current balance in some ways, it is possible +that they will create unexpected problems in the future or prevent it from +gracefully handling an unexpected disruption. + * Regardless of the result of the balancing algorithm, rebalancing might not be allowed due to allocation rules such as forced awareness and allocation -filtering. +filtering. Use the <> API to explain the current +allocation of shards. ==== diff --git a/docs/reference/modules/indices/recovery.asciidoc b/docs/reference/modules/indices/recovery.asciidoc index 02b70c69876ff..261c3d3fc3f24 100644 --- a/docs/reference/modules/indices/recovery.asciidoc +++ b/docs/reference/modules/indices/recovery.asciidoc @@ -38,8 +38,9 @@ This limit applies to each node separately. If multiple nodes in a cluster perform recoveries at the same time, the cluster's total recovery traffic may exceed this limit. + -If this limit is too high, ongoing recoveries may consume an excess of bandwidth -and other resources, which can destabilize the cluster. +If this limit is too high, ongoing recoveries may consume an excess of +bandwidth and other resources, which can have a performance impact on your +cluster and in extreme cases may destabilize it. + This is a dynamic setting, which means you can set it in each node's `elasticsearch.yml` config file and you can update it dynamically using the From 931f2c48c916536626b4fbbb86a6595ac4f9b7d2 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Mon, 5 Feb 2024 21:06:09 +1100 Subject: [PATCH 014/106] [Docs] Fix a doc bug for Flush API's force parameter (#105112) The force parameter defaults to false instead of true. 
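For reference, `force` is the flush flag that commits even when there are no new operations, which is
what lets it increment the transaction log generation as the page describes. A hedged sketch of opting
in explicitly, assuming the 8.x transport-level client API (the index name and wrapper class are
illustrative, not part of this change):

import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.client.internal.Client;

class ForcedFlushExample {
    // Sketch only: flush even though nothing new needs committing.
    static void forceFlush(Client client, String index) {
        FlushRequest request = new FlushRequest(index) // e.g. "my-index-000001"
            .force(true)          // override the documented default of false
            .waitIfOngoing(true); // wait for a concurrent flush instead of failing
        client.admin().indices().flush(request).actionGet();
    }
}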
--- docs/reference/indices/flush.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/indices/flush.asciidoc b/docs/reference/indices/flush.asciidoc
index 1f0a79258bd37..25d39a17af306 100644
--- a/docs/reference/indices/flush.asciidoc
+++ b/docs/reference/indices/flush.asciidoc
@@ -81,7 +81,7 @@ Defaults to `open`.
If `true`,
the request forces a flush
even if there are no changes to commit to the index.
-Defaults to `true`.
+Defaults to `false`.
You can use this parameter
to increment the generation number of the transaction log.

From 2a39c32fb0386581e51ac5126bf70d2a2e766d54 Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Mon, 5 Feb 2024 10:38:39 +0000
Subject: [PATCH 015/106] Mute IndexRecoveryIT testDoNotInfinitelyWaitForMapping
 (#105125)

For #105122

---
 .../java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java
index bd400f9f0f6a1..1c4dbce2ccf32 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java
@@ -1039,6 +1039,7 @@ public void testHistoryRetention() throws Exception {
assertThat(recoveryState.getTranslog().recoveredOperations(), greaterThan(0));
}

+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105122")
public void testDoNotInfinitelyWaitForMapping() {
internalCluster().ensureAtLeastNumDataNodes(3);
createIndex(

From d6bbfc53bbc86e1f6595db1a3c21793ebab5c140 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Mon, 5 Feb 2024 11:41:49 +0100
Subject: [PATCH 016/106] Unmute DownsampleActionIT#testRollupNonTSIndex(...)
 (#105116)

and add more logging for when the test fails next time. 
Relates to #103981 --- .../xpack/ilm/actions/DownsampleActionIT.java | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java index a6fa7cd3ffbc6..8a7ec329e55c3 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java @@ -394,7 +394,6 @@ public void testILMWaitsForTimeSeriesEndTimeToLapse() throws Exception { }, 30, TimeUnit.SECONDS); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103981") public void testRollupNonTSIndex() throws Exception { createIndex(index, alias, false); index(client(), index, true, null, "@timestamp", "2020-01-01T05:10:00Z", "volume", 11.0, "metricset", randomAlphaOfLength(5)); @@ -404,10 +403,19 @@ public void testRollupNonTSIndex() throws Exception { createNewSingletonPolicy(client(), policy, phaseName, new DownsampleAction(fixedInterval, DownsampleAction.DEFAULT_WAIT_TIMEOUT)); updatePolicy(client(), index, policy); - assertBusy(() -> assertThat(getStepKeyForIndex(client(), index), equalTo(PhaseCompleteStep.finalStep(phaseName).getKey()))); - String rollupIndex = getRollupIndexName(client(), index, fixedInterval); - assertNull("Rollup index should not have been created", rollupIndex); - assertTrue("Source index should not have been deleted", indexExists(index)); + try { + assertBusy(() -> assertThat(getStepKeyForIndex(client(), index), equalTo(PhaseCompleteStep.finalStep(phaseName).getKey()))); + String rollupIndex = getRollupIndexName(client(), index, fixedInterval); + assertNull("Rollup index should not have been created", rollupIndex); + assertTrue("Source index should not have been deleted", indexExists(index)); + } catch (AssertionError ea) { + logger.warn( + "--> original index name is [{}], rollup index name is NULL, possible explanation: {}", + index, + explainIndex(client(), index) + ); + throw ea; + } } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101428") From 9bc2a7045ead36870110926d94fb609c6512b35d Mon Sep 17 00:00:00 2001 From: Dmitry Cherniachenko <2sabio@gmail.com> Date: Mon, 5 Feb 2024 11:48:41 +0100 Subject: [PATCH 017/106] Minor grammar fixes (StreamInput.java) (#99857) --- .../org/elasticsearch/common/io/stream/StreamInput.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index c4f0dc58f5ffd..9e271ee6f9bfc 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -364,7 +364,7 @@ public Text readOptionalText() throws IOException { } public Text readText() throws IOException { - // use StringAndBytes so we can cache the string if its ever converted to it + // use StringAndBytes so we can cache the string if it's ever converted to it int length = readInt(); return new Text(readBytesReference(length)); } @@ -1271,8 +1271,8 @@ protected int readArraySize() throws IOException { if (arraySize < 0) { throwNegative(arraySize); } - // lets do a sanity check that if we are reading an 
array size that is bigger that the remaining bytes we can safely
- throw an exception instead of allocating the array based on the size. A simple corrutpted byte can make a node go OOM
+ let's do a sanity check that if we are reading an array size that is bigger than the remaining bytes we can safely
+ throw an exception instead of allocating the array based on the size. A simple corrupted byte can make a node go OOM
if the size is large and for perf reasons we allocate arrays ahead of time
ensureCanReadBytes(arraySize);
return arraySize;
@@ -1287,7 +1287,7 @@ private static void throwExceedsMaxArraySize(int arraySize) {
}

/**
- * This method throws an {@link EOFException} if the given number of bytes can not be read from the this stream. This method might
+ * This method throws an {@link EOFException} if the given number of bytes can not be read from the stream. This method might
* be a no-op depending on the underlying implementation if the information of the remaining bytes is not present.
*/
protected abstract void ensureCanReadBytes(int length) throws EOFException;

From 9f2d38856d3f689344fe768e10a59a5eb91438c4 Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Mon, 5 Feb 2024 11:56:36 +0100
Subject: [PATCH 018/106] Simplify EQL logic that references SearchHit (#105060)

I tried to move this logic to use pooled SearchHit instances but it
turned out to be too complicated in one go, so simplifying obvious spots
here:
* ReversePayload is pointless: it just reverses the original payload.
* a number of listeners were unnecessary and could be expressed inline much more clearly
* moved some "unpooling" to later in the logic to make objects live for a shorter time and have fewer references to them

---
 .../xpack/core/async/AsyncResponse.java | 2 +-
 .../xpack/eql/action/EqlSearchResponse.java | 6 ++-
 .../eql/execution/payload/EventPayload.java | 11 +++--
 .../eql/execution/payload/ReversePayload.java | 44 -------------------
 .../eql/execution/sample/SamplePayload.java | 4 +-
 .../eql/execution/search/AsEventListener.java | 26 -----------
 .../execution/search/BasicQueryClient.java | 6 +--
 .../eql/execution/search/ReverseListener.java | 25 -----------
 .../execution/sequence/SequencePayload.java | 4 +-
 .../execution/sequence/TumblingWindow.java | 3 +-
 .../xpack/eql/plan/physical/EsQueryExec.java | 11 +++--
 .../eql/action/EqlSearchResponseTests.java | 4 +-
 12 files changed, 25 insertions(+), 121 deletions(-)
 delete mode 100644 x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/payload/ReversePayload.java
 delete mode 100644 x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/AsEventListener.java
 delete mode 100644 x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/ReverseListener.java

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResponse.java
index b31544a1921a6..f6a9bd8474838 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResponse.java
@@ -10,7 +10,7 @@
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.core.RefCounted;

-public interface AsyncResponse> extends Writeable, RefCounted {
+public interface AsyncResponse> extends Writeable, RefCounted {
/**
* When this response will expire as a timestamp in milliseconds since epoch.
*/ diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java index f9f9238b6c4ab..5eef57cbb6c5b 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java @@ -22,6 +22,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.InstantiatingObjectParser; @@ -43,6 +44,7 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.eql.util.SearchHitUtils.qualifiedIndex; public class EqlSearchResponse extends ActionResponse implements ToXContentObject, QlStatusResponse.AsyncStatus { @@ -260,8 +262,8 @@ private static final class Fields { private final boolean missing; - public Event(String index, String id, BytesReference source, Map fetchFields) { - this(index, id, source, fetchFields, false); + public Event(SearchHit hit) { + this(qualifiedIndex(hit), hit.getId(), hit.getSourceRef(), hit.getDocumentFields(), false); } public Event(String index, String id, BytesReference source, Map fetchFields, Boolean missing) { diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/payload/EventPayload.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/payload/EventPayload.java index 0749b53c7b1cf..a7845ca62dccc 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/payload/EventPayload.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/payload/EventPayload.java @@ -9,14 +9,12 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.xpack.eql.action.EqlSearchResponse.Event; -import org.elasticsearch.xpack.eql.execution.search.RuntimeUtils; import java.util.ArrayList; import java.util.List; -import static org.elasticsearch.xpack.eql.util.SearchHitUtils.qualifiedIndex; - public class EventPayload extends AbstractPayload { private final List values; @@ -24,10 +22,11 @@ public class EventPayload extends AbstractPayload { public EventPayload(SearchResponse response) { super(response.isTimedOut(), response.getTook()); - List hits = RuntimeUtils.searchHits(response); - values = new ArrayList<>(hits.size()); + SearchHits hits = response.getHits(); + values = new ArrayList<>(hits.getHits().length); for (SearchHit hit : hits) { - values.add(new Event(qualifiedIndex(hit), hit.getId(), hit.getSourceRef(), hit.getDocumentFields())); + // TODO: remove unpooled usage + values.add(new Event(hit.asUnpooled())); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/payload/ReversePayload.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/payload/ReversePayload.java deleted file mode 100644 index 533dc3a992e74..0000000000000 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/payload/ReversePayload.java +++ /dev/null @@ -1,44 +0,0 @@ 
-/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.eql.execution.payload; - -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xpack.eql.session.Payload; - -import java.util.Collections; -import java.util.List; - -public class ReversePayload implements Payload { - - private final Payload delegate; - - public ReversePayload(Payload delegate) { - this.delegate = delegate; - Collections.reverse(delegate.values()); - } - - @Override - public Type resultType() { - return delegate.resultType(); - } - - @Override - public boolean timedOut() { - return delegate.timedOut(); - } - - @Override - public TimeValue timeTook() { - return delegate.timeTook(); - } - - @Override - public List values() { - return delegate.values(); - } -} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sample/SamplePayload.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sample/SamplePayload.java index ddd33e58f5448..121f4c208273b 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sample/SamplePayload.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sample/SamplePayload.java @@ -15,8 +15,6 @@ import java.util.ArrayList; import java.util.List; -import static org.elasticsearch.xpack.eql.util.SearchHitUtils.qualifiedIndex; - class SamplePayload extends AbstractPayload { private final List values; @@ -30,7 +28,7 @@ class SamplePayload extends AbstractPayload { List hits = docs.get(i); List events = new ArrayList<>(hits.size()); for (SearchHit hit : hits) { - events.add(new Event(qualifiedIndex(hit), hit.getId(), hit.getSourceRef(), hit.getDocumentFields())); + events.add(new Event(hit)); } values.add(new org.elasticsearch.xpack.eql.action.EqlSearchResponse.Sequence(s.key().asList(), events)); } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/AsEventListener.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/AsEventListener.java deleted file mode 100644 index 122e28f4e50b8..0000000000000 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/AsEventListener.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.eql.execution.search; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.DelegatingActionListener; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.xpack.eql.execution.payload.EventPayload; -import org.elasticsearch.xpack.eql.session.Payload; - -public class AsEventListener extends DelegatingActionListener { - - public AsEventListener(ActionListener listener) { - super(listener); - } - - @Override - public void onResponse(SearchResponse response) { - delegate.onResponse(new EventPayload(response)); - } -} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/BasicQueryClient.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/BasicQueryClient.java index ceaf8bcbb6b6f..6cbe5298b5950 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/BasicQueryClient.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/BasicQueryClient.java @@ -159,13 +159,13 @@ public void fetchHits(Iterable> refs, ActionListener docs = RuntimeUtils.searchHits(item.getResponse()); // for each doc, find its reference and its position inside the matrix - for (SearchHit doc : docs) { + for (SearchHit doc : item.getResponse().getHits()) { HitReference docRef = new HitReference(doc); List positions = referenceToPosition.get(docRef); positions.forEach(pos -> { - SearchHit previous = seq.get(pos / listSize).set(pos % listSize, doc); + // TODO: stop using unpooled + SearchHit previous = seq.get(pos / listSize).set(pos % listSize, doc.asUnpooled()); if (previous != null) { throw new EqlIllegalArgumentException( "Overriding sequence match [{}] with [{}]", diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/ReverseListener.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/ReverseListener.java deleted file mode 100644 index bc6fd8c82b85a..0000000000000 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/ReverseListener.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.eql.execution.search; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.DelegatingActionListener; -import org.elasticsearch.xpack.eql.execution.payload.ReversePayload; -import org.elasticsearch.xpack.eql.session.Payload; - -public class ReverseListener extends DelegatingActionListener { - - public ReverseListener(ActionListener delegate) { - super(delegate); - } - - @Override - public void onResponse(Payload response) { - delegate.onResponse(new ReversePayload(response)); - } -} diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequencePayload.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequencePayload.java index 95e18d54f5a08..45083babddbb4 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequencePayload.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequencePayload.java @@ -15,8 +15,6 @@ import java.util.ArrayList; import java.util.List; -import static org.elasticsearch.xpack.eql.util.SearchHitUtils.qualifiedIndex; - class SequencePayload extends AbstractPayload { private final List values; @@ -33,7 +31,7 @@ class SequencePayload extends AbstractPayload { if (hit == null) { events.add(Event.MISSING_EVENT); } else { - events.add(new Event(qualifiedIndex(hit), hit.getId(), hit.getSourceRef(), hit.getDocumentFields())); + events.add(new Event(hit)); } } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java index d692bc376de01..35f171806ccb2 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java @@ -732,8 +732,7 @@ private void doPayload(ActionListener listener) { if (criteria.get(matcher.firstPositiveStage).descending()) { Collections.reverse(completed); } - SequencePayload payload = new SequencePayload(completed, addMissingEventPlaceholders(listOfHits), false, timeTook()); - return payload; + return new SequencePayload(completed, addMissingEventPlaceholders(listOfHits), false, timeTook()); })); } diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/EsQueryExec.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/EsQueryExec.java index 4877b4d909a72..6fa61dcd84e48 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/EsQueryExec.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plan/physical/EsQueryExec.java @@ -11,10 +11,9 @@ import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortOrder; -import org.elasticsearch.xpack.eql.execution.search.AsEventListener; +import org.elasticsearch.xpack.eql.execution.payload.EventPayload; import org.elasticsearch.xpack.eql.execution.search.BasicQueryClient; import org.elasticsearch.xpack.eql.execution.search.QueryRequest; -import org.elasticsearch.xpack.eql.execution.search.ReverseListener; import org.elasticsearch.xpack.eql.execution.search.SourceGenerator; import org.elasticsearch.xpack.eql.querydsl.container.QueryContainer; import org.elasticsearch.xpack.eql.session.EqlConfiguration; 
@@ -24,6 +23,7 @@
import org.elasticsearch.xpack.ql.tree.NodeInfo;
import org.elasticsearch.xpack.ql.tree.Source;

+import java.util.Collections;
import java.util.List;
import java.util.Objects;

@@ -71,8 +71,11 @@ public SearchSourceBuilder source(EqlSession session, boolean includeFetchFields
public void execute(EqlSession session, ActionListener listener) {
// endpoint - fetch all source
QueryRequest request = () -> source(session, true).fetchSource(FetchSourceContext.FETCH_SOURCE);
- listener = shouldReverse(request) ? new ReverseListener(listener) : listener;
- new BasicQueryClient(session).query(request, new AsEventListener(listener));
+ new BasicQueryClient(session).query(request, listener.safeMap(shouldReverse(request) ? r -> {
+ var res = new EventPayload(r);
+ Collections.reverse(res.values());
+ return res;
+ } : EventPayload::new));
}

private static boolean shouldReverse(QueryRequest query) {
diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java
index 255e94d6bda34..6cb283d11848e 100644
--- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java
+++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java
@@ -117,7 +117,7 @@ static List randomEvents(XContentType xType) {
if (fetchFields.isEmpty() && randomBoolean()) {
fetchFields = null;
}
- hits.add(new Event(String.valueOf(i), randomAlphaOfLength(10), bytes, fetchFields));
+ hits.add(new Event(String.valueOf(i), randomAlphaOfLength(10), bytes, fetchFields, false));
}
}
}
@@ -297,7 +297,7 @@ private List mutateEvents(List original, TransportVersion version) {
}

public void testEmptyIndexAsMissingEvent() throws IOException {
- Event event = new Event("", "", new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), null);
+ Event event = new Event("", "", new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), null, false);
BytesStreamOutput out = new BytesStreamOutput();
out.setTransportVersion(TransportVersions.V_8_9_X);// 8.9.1
event.writeTo(out);

From e75ca48ece475411f515208761cdb68703259405 Mon Sep 17 00:00:00 2001
From: Ievgen Degtiarenko
Date: Mon, 5 Feb 2024 14:11:46 +0100
Subject: [PATCH 019/106] Fix testListTasksWaitForCompletion (#104391)

This change attempts to fix testListTasksWaitForCompletion by setting
barriers to verify that the task started on all nodes and that the list
request had a chance to list all running tasks before canceling the
TEST_TASK

---
 .../admin/cluster/node/tasks/TasksIT.java | 113 +++++++++---------
 .../tasks/list/TransportListTasksAction.java | 5 -
 2 files changed, 58 insertions(+), 60 deletions(-)

diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
index 884f6dbcd677e..0766b732099c4 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java
@@ -48,10 +48,10 @@
import org.elasticsearch.tasks.TaskResult;
import org.elasticsearch.tasks.TaskResultsService;
import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.tasks.MockTaskManager;
import org.elasticsearch.test.tasks.MockTaskManagerListener;
import
org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ReceiveTimeoutTransportException; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentType; @@ -64,6 +64,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CountDownLatch; @@ -71,6 +72,7 @@ import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Function; +import java.util.function.Supplier; import java.util.stream.Collectors; import static java.util.Collections.emptyList; @@ -531,22 +533,32 @@ public void testTasksUnblocking() throws Exception { ); } - @TestLogging( - reason = "https://github.com/elastic/elasticsearch/issues/97923", - value = "org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction:TRACE" - ) public void testListTasksWaitForCompletion() throws Exception { - waitForCompletionTestCase( - randomBoolean(), - id -> clusterAdmin().prepareListTasks().setActions(TEST_TASK_ACTION.name()).setWaitForCompletion(true).execute(), - response -> { - assertThat(response.getNodeFailures(), empty()); - assertThat(response.getTaskFailures(), empty()); - assertThat(response.getTasks(), hasSize(1)); - TaskInfo task = response.getTasks().get(0); - assertEquals(TEST_TASK_ACTION.name(), task.action()); + waitForCompletionTestCase(randomBoolean(), id -> { + var future = ensureStartedOnAllNodes( + "cluster:monitor/tasks/lists[n]", + () -> clusterAdmin().prepareListTasks().setActions(TEST_TASK_ACTION.name()).setWaitForCompletion(true).execute() + ); + + // This ensures that a task has progressed to the point of listing all running tasks and subscribing to their updates + for (var threadPool : internalCluster().getInstances(ThreadPool.class)) { + var max = threadPool.info(ThreadPool.Names.MANAGEMENT).getMax(); + var executor = threadPool.executor(ThreadPool.Names.MANAGEMENT); + var waitForManagementToCompleteAllTasks = new CyclicBarrier(max + 1); + for (int i = 0; i < max; i++) { + executor.submit(() -> safeAwait(waitForManagementToCompleteAllTasks)); + } + safeAwait(waitForManagementToCompleteAllTasks); } - ); + + return future; + }, response -> { + assertThat(response.getNodeFailures(), empty()); + assertThat(response.getTaskFailures(), empty()); + assertThat(response.getTasks(), hasSize(1)); + TaskInfo task = response.getTasks().get(0); + assertEquals(TEST_TASK_ACTION.name(), task.action()); + }); } public void testGetTaskWaitForCompletionWithoutStoringResult() throws Exception { @@ -582,34 +594,20 @@ private void waitForCompletionTestCase(boolean storeResult, Function future = client().execute(TEST_TASK_ACTION, request); + ActionFuture future = ensureStartedOnAllNodes( + TEST_TASK_ACTION.name() + "[n]", + () -> client().execute(TEST_TASK_ACTION, request) + ); ActionFuture waitResponseFuture; - TaskId taskId; try { - taskId = waitForTestTaskStartOnAllNodes(); - - // Wait for the task to start - assertBusy(() -> clusterAdmin().prepareGetTask(taskId).get()); - - // Register listeners so we can be sure the waiting started - CountDownLatch waitForWaitingToStart = new CountDownLatch(1); - for (TransportService transportService : internalCluster().getInstances(TransportService.class)) { - ((MockTaskManager) transportService.getTaskManager()).addListener(new MockTaskManagerListener() { - @Override - public 
void onTaskUnregistered(Task task) { - waitForWaitingToStart.countDown(); - } - }); - } + var tasks = clusterAdmin().prepareListTasks().setActions(TEST_TASK_ACTION.name()).get().getTasks(); + assertThat(tasks, hasSize(1)); + var taskId = tasks.get(0).taskId(); + clusterAdmin().prepareGetTask(taskId).get(); // Spin up a request to wait for the test task to finish waitResponseFuture = wait.apply(taskId); - - /* Wait for the wait to start. This should count down just *before* we wait for completion but after the list/get has got a - * reference to the running task. Because we unblock immediately after this the task may no longer be running for us to wait - * on which is fine. */ - waitForWaitingToStart.await(); } finally { // Unblock the request so the wait for completion request can finish client().execute(UNBLOCK_TASK_ACTION, new TestTaskPlugin.UnblockTestTasksRequest()).get(); @@ -651,14 +649,15 @@ public void testGetTaskWaitForTimeout() throws Exception { */ private void waitForTimeoutTestCase(Function> wait) throws Exception { // Start blocking test task - TestTaskPlugin.NodesRequest request = new TestTaskPlugin.NodesRequest("test"); - ActionFuture future = client().execute(TEST_TASK_ACTION, request); + ActionFuture future = ensureStartedOnAllNodes( + TEST_TASK_ACTION.name() + "[n]", + () -> client().execute(TEST_TASK_ACTION, new TestTaskPlugin.NodesRequest("test")) + ); try { - TaskId taskId = waitForTestTaskStartOnAllNodes(); - - // Wait for the task to start - assertBusy(() -> clusterAdmin().prepareGetTask(taskId).get()); - + var tasks = clusterAdmin().prepareListTasks().setActions(TEST_TASK_ACTION.name()).get().getTasks(); + assertThat(tasks, hasSize(1)); + var taskId = tasks.get(0).taskId(); + clusterAdmin().prepareGetTask(taskId).get(); // Spin up a request that should wait for those tasks to finish // It will timeout because we haven't unblocked the tasks Iterable failures = wait.apply(taskId); @@ -675,17 +674,21 @@ private void waitForTimeoutTestCase(Function { - List tasks = clusterAdmin().prepareListTasks().setActions(TEST_TASK_ACTION.name() + "[n]").get().getTasks(); - assertEquals(internalCluster().size(), tasks.size()); - }); - List task = clusterAdmin().prepareListTasks().setActions(TEST_TASK_ACTION.name()).get().getTasks(); - assertThat(task, hasSize(1)); - return task.get(0).taskId(); + private ActionFuture ensureStartedOnAllNodes(String nodeTaskName, Supplier> taskStarter) { + var startedOnAllNodes = new CountDownLatch(internalCluster().size()); + for (TransportService transportService : internalCluster().getInstances(TransportService.class)) { + ((MockTaskManager) transportService.getTaskManager()).addListener(new MockTaskManagerListener() { + @Override + public void onTaskRegistered(Task task) { + if (Objects.equals(task.getAction(), nodeTaskName)) { + startedOnAllNodes.countDown(); + } + } + }); + } + var future = taskStarter.get(); + safeAwait(startedOnAllNodes); + return future; } public void testTasksListWaitForNoTask() throws Exception { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java index 62ede5b2f480b..4f8a6b6db2980 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java @@ -8,8 +8,6 @@ package 
org.elasticsearch.action.admin.cluster.node.tasks.list; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; @@ -43,8 +41,6 @@ public class TransportListTasksAction extends TransportTasksAction { - private static final Logger logger = LogManager.getLogger(TransportListTasksAction.class); - public static final ActionType TYPE = new ActionType<>("cluster:monitor/tasks/lists"); public static long waitForCompletionTimeout(TimeValue timeout) { @@ -132,7 +128,6 @@ protected void processTasks(CancellableTask nodeTask, ListTasksRequest request, } processedTasks.add(task); } - logger.trace("Matched {} tasks of all running {}", processedTasks, taskManager.getTasks().values()); } catch (Exception e) { allMatchedTasksRemovedListener.onFailure(e); return; From 62dc143ab5f8654b40d1988dc74896fbea5ea9b5 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Mon, 5 Feb 2024 14:37:36 +0100 Subject: [PATCH 020/106] [Connectors API] Fix bug with crawler configuration parsing and sync_now flag (#105024) --- docs/changelog/105024.yaml | 6 ++ .../335_connector_update_configuration.yml | 24 ++++++- .../application/connector/Connector.java | 4 +- .../connector/ConnectorConfiguration.java | 70 ++++++++++++------- .../connector/syncjob/ConnectorSyncJob.java | 2 +- .../ConnectorConfigurationTests.java | 40 +++++++++++ .../syncjob/ConnectorSyncJobTests.java | 2 +- 7 files changed, 116 insertions(+), 32 deletions(-) create mode 100644 docs/changelog/105024.yaml diff --git a/docs/changelog/105024.yaml b/docs/changelog/105024.yaml new file mode 100644 index 0000000000000..96268b78ddf5d --- /dev/null +++ b/docs/changelog/105024.yaml @@ -0,0 +1,6 @@ +pr: 105024 +summary: "[Connectors API] Fix bug with crawler configuration parsing and `sync_now`\ + \ flag" +area: Application +type: bug +issues: [] diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml index 5a7ab14dc6386..aeac8202a950b 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml @@ -185,7 +185,6 @@ setup: - field: some_field value: 31 display: numeric - label: Very important field --- "Update Connector Configuration - Unknown field type": @@ -240,3 +239,26 @@ setup: - constraint: 0 type: unknown_constraint value: 123 + +--- +"Update Connector Configuration - Crawler configuration": + - do: + connector.update_configuration: + connector_id: test-connector + body: + configuration: + nextSyncConfig: + label: nextSyncConfig + value: + max_crawl_depth: 3 + sitemap_discovery_disabled: false + seed_urls: + - https://elastic.co/ + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { configuration.nextSyncConfig.value.max_crawl_depth: 3 } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java index db8578e7dfa99..5bae203175d36 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java @@ -710,8 +710,8 @@ public Builder setSyncCursor(Object syncCursor) { return this; } - public Builder setSyncNow(boolean syncNow) { - this.syncNow = syncNow; + public Builder setSyncNow(Boolean syncNow) { + this.syncNow = Objects.requireNonNullElse(syncNow, false); return this; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java index 8ed7c417a1af1..7d7c7b5fa61f9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java @@ -162,8 +162,8 @@ public ConnectorConfiguration(StreamInput in) throws IOException { .setOptions((List) args[i++]) .setOrder((Integer) args[i++]) .setPlaceholder((String) args[i++]) - .setRequired((boolean) args[i++]) - .setSensitive((boolean) args[i++]) + .setRequired((Boolean) args[i++]) + .setSensitive((Boolean) args[i++]) .setTooltip((String) args[i++]) .setType((ConfigurationFieldType) args[i++]) .setUiRestrictions((List) args[i++]) @@ -187,40 +187,42 @@ public ConnectorConfiguration(StreamInput in) throws IOException { } throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); }, DEFAULT_VALUE_FIELD, ObjectParser.ValueType.VALUE); - PARSER.declareObjectArray(constructorArg(), (p, c) -> ConfigurationDependency.fromXContent(p), DEPENDS_ON_FIELD); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ConfigurationDependency.fromXContent(p), DEPENDS_ON_FIELD); PARSER.declareField( - constructorArg(), + optionalConstructorArg(), (p, c) -> ConfigurationDisplayType.displayType(p.text()), DISPLAY_FIELD, - ObjectParser.ValueType.STRING + ObjectParser.ValueType.STRING_OR_NULL ); PARSER.declareString(constructorArg(), LABEL_FIELD); - PARSER.declareObjectArray(constructorArg(), (p, c) -> ConfigurationSelectOption.fromXContent(p), OPTIONS_FIELD); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ConfigurationSelectOption.fromXContent(p), OPTIONS_FIELD); PARSER.declareInt(optionalConstructorArg(), ORDER_FIELD); - PARSER.declareString(optionalConstructorArg(), PLACEHOLDER_FIELD); - PARSER.declareBoolean(constructorArg(), REQUIRED_FIELD); - PARSER.declareBoolean(constructorArg(), SENSITIVE_FIELD); + PARSER.declareStringOrNull(optionalConstructorArg(), PLACEHOLDER_FIELD); + PARSER.declareBoolean(optionalConstructorArg(), REQUIRED_FIELD); + PARSER.declareBoolean(optionalConstructorArg(), SENSITIVE_FIELD); PARSER.declareStringOrNull(optionalConstructorArg(), TOOLTIP_FIELD); PARSER.declareField( - constructorArg(), - (p, c) -> ConfigurationFieldType.fieldType(p.text()), + optionalConstructorArg(), + (p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? 
null : ConfigurationFieldType.fieldType(p.text()), TYPE_FIELD, - ObjectParser.ValueType.STRING + ObjectParser.ValueType.STRING_OR_NULL ); - PARSER.declareStringArray(constructorArg(), UI_RESTRICTIONS_FIELD); - PARSER.declareObjectArray(constructorArg(), (p, c) -> ConfigurationValidation.fromXContent(p), VALIDATIONS_FIELD); - PARSER.declareField(constructorArg(), (p, c) -> { + PARSER.declareStringArray(optionalConstructorArg(), UI_RESTRICTIONS_FIELD); + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> ConfigurationValidation.fromXContent(p), VALIDATIONS_FIELD); + PARSER.declareField(optionalConstructorArg(), (p, c) -> { if (p.currentToken() == XContentParser.Token.VALUE_STRING) { return p.text(); } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { return p.numberValue(); } else if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { return p.booleanValue(); + } else if (p.currentToken() == XContentParser.Token.START_OBJECT) { + return p.map(); } else if (p.currentToken() == XContentParser.Token.VALUE_NULL) { return null; } throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); - }, VALUE_FIELD, ObjectParser.ValueType.VALUE); + }, VALUE_FIELD, ObjectParser.ValueType.VALUE_OBJECT_ARRAY); } @Override @@ -231,10 +233,16 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CATEGORY_FIELD.getPreferredName(), category); } builder.field(DEFAULT_VALUE_FIELD.getPreferredName(), defaultValue); - builder.xContentList(DEPENDS_ON_FIELD.getPreferredName(), dependsOn); - builder.field(DISPLAY_FIELD.getPreferredName(), display.toString()); + if (dependsOn != null) { + builder.xContentList(DEPENDS_ON_FIELD.getPreferredName(), dependsOn); + } + if (display != null) { + builder.field(DISPLAY_FIELD.getPreferredName(), display.toString()); + } builder.field(LABEL_FIELD.getPreferredName(), label); - builder.xContentList(OPTIONS_FIELD.getPreferredName(), options); + if (options != null) { + builder.xContentList(OPTIONS_FIELD.getPreferredName(), options); + } if (order != null) { builder.field(ORDER_FIELD.getPreferredName(), order); } @@ -243,10 +251,18 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } builder.field(REQUIRED_FIELD.getPreferredName(), required); builder.field(SENSITIVE_FIELD.getPreferredName(), sensitive); - builder.field(TOOLTIP_FIELD.getPreferredName(), tooltip); - builder.field(TYPE_FIELD.getPreferredName(), type.toString()); - builder.stringListField(UI_RESTRICTIONS_FIELD.getPreferredName(), uiRestrictions); - builder.xContentList(VALIDATIONS_FIELD.getPreferredName(), validations); + if (tooltip != null) { + builder.field(TOOLTIP_FIELD.getPreferredName(), tooltip); + } + if (type != null) { + builder.field(TYPE_FIELD.getPreferredName(), type.toString()); + } + if (uiRestrictions != null) { + builder.stringListField(UI_RESTRICTIONS_FIELD.getPreferredName(), uiRestrictions); + } + if (validations != null) { + builder.xContentList(VALIDATIONS_FIELD.getPreferredName(), validations); + } builder.field(VALUE_FIELD.getPreferredName(), value); } builder.endObject(); @@ -385,13 +401,13 @@ public Builder setPlaceholder(String placeholder) { return this; } - public Builder setRequired(boolean required) { - this.required = required; + public Builder setRequired(Boolean required) { + this.required = Objects.requireNonNullElse(required, false); return this; } - public Builder setSensitive(boolean sensitive) { - this.sensitive = sensitive; + public Builder 
setSensitive(Boolean sensitive) { + this.sensitive = Objects.requireNonNullElse(sensitive, false); return this; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java index fb34035e5400b..c531187dbb0a0 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -322,7 +322,7 @@ public ConnectorSyncJob(StreamInput in) throws IOException { STATUS_FIELD, ObjectParser.ValueType.STRING ); - PARSER.declareLong(constructorArg(), TOTAL_DOCUMENT_COUNT_FIELD); + PARSER.declareLongOrNull(constructorArg(), 0L, TOTAL_DOCUMENT_COUNT_FIELD); PARSER.declareField( constructorArg(), (p, c) -> ConnectorSyncJobTriggerMethod.fromString(p.text()), diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java index 9b1f9c60d1607..35b21ce676a57 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorConfigurationTests.java @@ -85,6 +85,46 @@ public void testToXContent() throws IOException { assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); } + public void testToXContentCrawlerConfig_WithNullValue() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "label": "nextSyncConfig", + "value": null + } + """); + + ConnectorConfiguration configuration = ConnectorConfiguration.fromXContentBytes(new BytesArray(content), XContentType.JSON); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + ConnectorConfiguration parsed; + try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { + parsed = ConnectorConfiguration.fromXContent(parser); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); + } + + public void testToXContentCrawlerConfig_WithCrawlerConfigurationOverrides() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "label": "nextSyncConfig", + "value": { + "max_crawl_depth": 3, + "sitemap_discovery_disabled": false, + "seed_urls": ["https://elastic.co/"] + } + } + """); + + ConnectorConfiguration configuration = ConnectorConfiguration.fromXContentBytes(new BytesArray(content), XContentType.JSON); + boolean humanReadable = true; + BytesReference originalBytes = toShuffledXContent(configuration, XContentType.JSON, ToXContent.EMPTY_PARAMS, humanReadable); + ConnectorConfiguration parsed; + try (XContentParser parser = createParser(XContentType.JSON.xContent(), originalBytes)) { + parsed = ConnectorConfiguration.fromXContent(parser); + } + assertToXContentEquivalent(originalBytes, toXContent(parsed, XContentType.JSON, humanReadable), XContentType.JSON); + } + public void testToXContentWithMultipleConstraintTypes() throws IOException { String content = 
XContentHelper.stripWhitespace(""" { diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java index 7b1a0f7d8dcf7..81b05ce25e177 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java @@ -235,7 +235,7 @@ public void testFromXContent_WithAllNullableFieldsSetToNull_DoesNotThrow() throw "metadata": {}, "started_at": null, "status": "canceling", - "total_document_count": 0, + "total_document_count": null, "trigger_method": "scheduled", "worker_hostname": null } From a34174c2244b93d50687ac89667f6216b5631da7 Mon Sep 17 00:00:00 2001 From: Michael Peterson Date: Mon, 5 Feb 2024 08:44:43 -0500 Subject: [PATCH 021/106] Query timeouts should not be return 500 INTERNAL_SERVER_ERROR status code (#104868) Created new Exception QueryPhaseTimeoutException, which returns RestStatus 504. We considered the 408 status code, but decided that the official spec for that status doesn't match this scenario, so 504 was considered the closest fit. --- .../elasticsearch/ElasticsearchException.java | 7 ++++ .../org/elasticsearch/TransportVersions.java | 1 + .../search/query/QueryPhase.java | 2 +- .../query/QueryPhaseTimeoutException.java | 34 +++++++++++++++++++ .../ExceptionSerializationTests.java | 2 ++ 5 files changed, 45 insertions(+), 1 deletion(-) create mode 100644 server/src/main/java/org/elasticsearch/search/query/QueryPhaseTimeoutException.java diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 4f29fb3a168b3..656d213e7a1fd 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -37,6 +37,7 @@ import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.UnsupportedAggregationOnDownsampledIndex; +import org.elasticsearch.search.query.QueryPhaseTimeoutException; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; @@ -1896,6 +1897,12 @@ private enum ElasticsearchExceptionHandle { AutoscalingMissedIndicesUpdateException::new, 175, TransportVersions.MISSED_INDICES_UPDATE_EXCEPTION_ADDED + ), + QUERY_PHASE_TIMEOUT_EXCEPTION( + QueryPhaseTimeoutException.class, + QueryPhaseTimeoutException::new, + 176, + TransportVersions.QUERY_PHASE_TIMEOUT_EXCEPTION_ADDED ); final Class exceptionClass; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 014e1a71d7a0e..cd7f9eb756b91 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -170,6 +170,7 @@ static TransportVersion def(int id) { public static final TransportVersion KNN_QUERY_NUMCANDS_AS_OPTIONAL_PARAM = def(8_583_00_0); public static final TransportVersion TRANSFORM_GET_BASIC_STATS = def(8_584_00_0); public static final TransportVersion NLP_DOCUMENT_CHUNKING_ADDED = 
def(8_585_00_0); + public static final TransportVersion QUERY_PHASE_TIMEOUT_EXCEPTION_ADDED = def(8_586_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 01015ec8cc78e..2368eeb18b021 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -210,7 +210,7 @@ static void addCollectorsAndSearch(SearchContext searchContext) throws QueryPhas if (searcher.timeExceeded()) { assert timeoutRunnable != null : "TimeExceededException thrown even though timeout wasn't set"; if (searchContext.request().allowPartialSearchResults() == false) { - throw new QueryPhaseExecutionException(searchContext.shardTarget(), "Time exceeded"); + throw new QueryPhaseTimeoutException(searchContext.shardTarget(), "Time exceeded"); } queryResult.searchTimedOut(true); } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseTimeoutException.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseTimeoutException.java new file mode 100644 index 0000000000000..1b41f31ea1c82 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseTimeoutException.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.search.query; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchShardTarget; + +import java.io.IOException; + +/** + * Specific instance of QueryPhaseExecutionException that indicates that a search timeout occurred. 
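+ * <p>(An illustrative aside, not part of the original javadoc: because this subclass only changes
+ * the reported status, callers that need to tell a timeout apart from other query phase failures
+ * can branch on the type, e.g. {@code if (e instanceof QueryPhaseTimeoutException) { retryWithLargerTimeout(); }},
+ * where {@code retryWithLargerTimeout} is a hypothetical caller-side method.)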
+ * Always returns http status 504 (Gateway Timeout) + */ +public class QueryPhaseTimeoutException extends QueryPhaseExecutionException { + public QueryPhaseTimeoutException(SearchShardTarget shardTarget, String msg) { + super(shardTarget, msg); + } + + public QueryPhaseTimeoutException(StreamInput in) throws IOException { + super(in); + } + + @Override + public RestStatus status() { + return RestStatus.GATEWAY_TIMEOUT; + } +} diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 3e0d9193ffed9..9d5c47fbccbc6 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -80,6 +80,7 @@ import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.UnsupportedAggregationOnDownsampledIndex; import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.query.QueryPhaseTimeoutException; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotException; import org.elasticsearch.snapshots.SnapshotId; @@ -827,6 +828,7 @@ public void testIds() { ids.put(173, TooManyScrollContextsException.class); ids.put(174, AggregationExecutionException.InvalidPath.class); ids.put(175, AutoscalingMissedIndicesUpdateException.class); + ids.put(176, QueryPhaseTimeoutException.class); Map, Integer> reverse = new HashMap<>(); for (Map.Entry> entry : ids.entrySet()) { From 6054ca36cfa79a1943afde26641240e817239158 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Mon, 5 Feb 2024 15:47:22 +0100 Subject: [PATCH 022/106] [Connector API] Support filtering by name, index name in list action (#105131) --- docs/changelog/105131.yaml | 5 ++ .../rest-api-spec/api/connector.list.json | 8 +++ .../test/entsearch/310_connector_list.yml | 64 ++++++++++++++++++- .../connector/ConnectorIndexService.java | 41 +++++++++++- .../connector/action/ListConnectorAction.java | 56 ++++++++++++++-- .../action/RestListConnectorAction.java | 6 +- .../action/TransportListConnectorAction.java | 3 + .../connector/ConnectorIndexServiceTests.java | 5 +- ...ectorActionRequestBWCSerializingTests.java | 9 ++- 9 files changed, 180 insertions(+), 17 deletions(-) create mode 100644 docs/changelog/105131.yaml diff --git a/docs/changelog/105131.yaml b/docs/changelog/105131.yaml new file mode 100644 index 0000000000000..36993527da583 --- /dev/null +++ b/docs/changelog/105131.yaml @@ -0,0 +1,5 @@ +pr: 105131 +summary: "[Connector API] Support filtering by name, index name in list action" +area: Application +type: enhancement +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json index bc8f12a933b1e..562190f6f5cad 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json @@ -31,6 +31,14 @@ "type": "int", "default": 100, "description": "specifies a max number of results to get (default: 100)" + }, + "index_name": { + "type": "string", + "description": "connector index name(s) to fetch connector documents for" + }, + "connector_name": { + "type": "string", + "description": "connector name(s) to fetch connector documents for" } } } diff --git 
a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml index 52cfcdee0bb85..7aa49297902d5 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml @@ -9,7 +9,7 @@ setup: connector_id: connector-a body: index_name: search-1-test - name: my-connector + name: my-connector-1 language: pl is_native: false service_type: super-connector @@ -18,7 +18,7 @@ setup: connector_id: connector-c body: index_name: search-3-test - name: my-connector + name: my-connector-3 language: nl is_native: false service_type: super-connector @@ -27,7 +27,7 @@ setup: connector_id: connector-b body: index_name: search-2-test - name: my-connector + name: my-connector-2 language: en is_native: true service_type: super-connector @@ -106,3 +106,61 @@ setup: - match: { count: 0 } + +--- +"List Connector - filter by index names": + - do: + connector.list: + index_name: search-1-test + + - match: { count: 1 } + - match: { results.0.index_name: "search-1-test" } + + - do: + connector.list: + index_name: search-1-test,search-2-test + + - match: { count: 2 } + - match: { results.0.index_name: "search-1-test" } + - match: { results.1.index_name: "search-2-test" } + + +--- +"List Connector - filter by index names, illegal name": + - do: + catch: "bad_request" + connector.list: + index_name: ~.!$$#index-name$$$ + + +--- +"List Connector - filter by connector names": + - do: + connector.list: + connector_name: my-connector-1 + + - match: { count: 1 } + - match: { results.0.name: "my-connector-1" } + + - do: + connector.list: + connector_name: my-connector-1,my-connector-2 + + - match: { count: 2 } + - match: { results.0.name: "my-connector-1" } + - match: { results.1.name: "my-connector-2" } + + +--- +"List Connector - filter by index name and name": + - do: + connector.list: + connector_name: my-connector-1,my-connector-2 + index_name: search-2-test + + - match: { count: 1 } + - match: { results.0.index_name: "search-2-test" } + - match: { results.0.name: "my-connector-2" } + + + diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index fe810bc4ca783..b321a497ab58d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -29,7 +29,9 @@ import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; @@ -280,13 +282,21 @@ public void deleteConnector(String connectorId, ActionListener l * * @param from From index to start the search from. * @param size The maximum number of {@link Connector}s to return. 
+ * @param indexNames A list of index names to filter the connectors. + * @param connectorNames A list of connector names to further filter the search results. * @param listener The action listener to invoke on response/failure. */ - public void listConnectors(int from, int size, ActionListener listener) { + public void listConnectors( + int from, + int size, + List indexNames, + List connectorNames, + ActionListener listener + ) { try { final SearchSourceBuilder source = new SearchSourceBuilder().from(from) .size(size) - .query(new MatchAllQueryBuilder()) + .query(buildListQuery(indexNames, connectorNames)) .fetchSource(true) .sort(Connector.INDEX_NAME_FIELD.getPreferredName(), SortOrder.ASC); final SearchRequest req = new SearchRequest(CONNECTOR_INDEX_NAME).source(source); @@ -314,6 +324,33 @@ public void onFailure(Exception e) { } } + /** + * Constructs a query for filtering instances of {@link Connector} based on index and/or connector names. + * Returns a {@link MatchAllQueryBuilder} if both parameters are empty or null, + * otherwise constructs a boolean query to filter by the provided lists. + * + * @param indexNames List of index names to filter by, or null/empty for no index name filtering. + * @param connectorNames List of connector names to filter by, or null/empty for no name filtering. + * @return A {@link QueryBuilder} tailored to the specified filters. + */ + private QueryBuilder buildListQuery(List indexNames, List connectorNames) { + boolean filterByIndexNames = indexNames != null && indexNames.isEmpty() == false; + boolean filterByConnectorNames = connectorNames != null && connectorNames.isEmpty() == false; + boolean usesFilter = filterByIndexNames || filterByConnectorNames; + + BoolQueryBuilder boolFilterQueryBuilder = new BoolQueryBuilder(); + + if (usesFilter) { + if (filterByIndexNames) { + boolFilterQueryBuilder.must().add(new TermsQueryBuilder(Connector.INDEX_NAME_FIELD.getPreferredName(), indexNames)); + } + if (filterByConnectorNames) { + boolFilterQueryBuilder.must().add(new TermsQueryBuilder(Connector.NAME_FIELD.getPreferredName(), connectorNames)); + } + } + return usesFilter ? boolFilterQueryBuilder : new MatchAllQueryBuilder(); + } + + /** + * Updates the {@link ConnectorConfiguration} property of a {@link Connector}. 
* The update process is non-additive; it completely replaces all existing configuration fields with the new configuration mapping, diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java index b4a3a2c0d3632..13a588fdd6314 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java @@ -11,8 +11,10 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.cluster.metadata.MetadataCreateIndexService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.indices.InvalidIndexNameException; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; @@ -26,7 +28,9 @@ import java.util.List; import java.util.Objects; +import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; public class ListConnectorAction { @@ -38,54 +42,88 @@ private ListConnectorAction() {/* no instances */} public static class Request extends ActionRequest implements ToXContentObject { private final PageParams pageParams; + private final List indexNames; + private final List connectorNames; private static final ParseField PAGE_PARAMS_FIELD = new ParseField("pageParams"); + private static final ParseField INDEX_NAMES_FIELD = new ParseField("index_names"); + private static final ParseField NAMES_FIELD = new ParseField("names"); public Request(StreamInput in) throws IOException { super(in); this.pageParams = new PageParams(in); + this.indexNames = in.readOptionalStringCollectionAsList(); + this.connectorNames = in.readOptionalStringCollectionAsList(); } - public Request(PageParams pageParams) { + public Request(PageParams pageParams, List indexNames, List connectorNames) { this.pageParams = pageParams; + this.indexNames = indexNames; + this.connectorNames = connectorNames; } public PageParams getPageParams() { return pageParams; } + public List getIndexNames() { + return indexNames; + } + + public List getConnectorNames() { + return connectorNames; + } + @Override public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; // Pagination validation is done as part of PageParams constructor - return null; + + if (indexNames != null && indexNames.isEmpty() == false) { + for (String indexName : indexNames) { + try { + MetadataCreateIndexService.validateIndexOrAliasName(indexName, InvalidIndexNameException::new); + } catch (InvalidIndexNameException e) { + validationException = addValidationError(e.toString(), validationException); + } + } + } + return validationException; } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); pageParams.writeTo(out); + out.writeOptionalStringCollection(indexNames); + out.writeOptionalStringCollection(connectorNames); } @Override public 
boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ListConnectorAction.Request that = (ListConnectorAction.Request) o; - return Objects.equals(pageParams, that.pageParams); + ListConnectorAction.Request request = (ListConnectorAction.Request) o; + return Objects.equals(pageParams, request.pageParams) + && Objects.equals(indexNames, request.indexNames) + && Objects.equals(connectorNames, request.connectorNames); } @Override public int hashCode() { - return Objects.hash(pageParams); + return Objects.hash(pageParams, indexNames, connectorNames); } + @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "list_connector_request", - p -> new ListConnectorAction.Request((PageParams) p[0]) + p -> new ListConnectorAction.Request((PageParams) p[0], (List) p[1], (List) p[2]) ); static { PARSER.declareObject(constructorArg(), (p, c) -> PageParams.fromXContent(p), PAGE_PARAMS_FIELD); + PARSER.declareStringArray(optionalConstructorArg(), INDEX_NAMES_FIELD); + PARSER.declareStringArray(optionalConstructorArg(), NAMES_FIELD); } public static ListConnectorAction.Request parse(XContentParser parser) { @@ -95,7 +133,11 @@ public static ListConnectorAction.Request parse(XContentParser parser) { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(PAGE_PARAMS_FIELD.getPreferredName(), pageParams); + { + builder.field(PAGE_PARAMS_FIELD.getPreferredName(), pageParams); + builder.field(INDEX_NAMES_FIELD.getPreferredName(), indexNames); + builder.field(NAMES_FIELD.getPreferredName(), connectorNames); + } builder.endObject(); return builder; } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java index 9c37e31944ac8..90232b340719d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; +import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.core.action.util.PageParams; import java.io.IOException; @@ -38,7 +39,10 @@ public List routes() { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { int from = restRequest.paramAsInt("from", PageParams.DEFAULT_FROM); int size = restRequest.paramAsInt("size", PageParams.DEFAULT_SIZE); - ListConnectorAction.Request request = new ListConnectorAction.Request(new PageParams(from, size)); + List indexNames = List.of(restRequest.paramAsStringArray(Connector.INDEX_NAME_FIELD.getPreferredName(), new String[0])); + List connectorNames = List.of(restRequest.paramAsStringArray("connector_name", new String[0])); + + ListConnectorAction.Request request = new ListConnectorAction.Request(new PageParams(from, size), indexNames, connectorNames); return channel -> client.execute(ListConnectorAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportListConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportListConnectorAction.java index cfe05965da37b..03334751c5a42 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportListConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportListConnectorAction.java @@ -42,9 +42,12 @@ public TransportListConnectorAction( @Override protected void doExecute(Task task, ListConnectorAction.Request request, ActionListener listener) { final PageParams pageParams = request.getPageParams(); + connectorIndexService.listConnectors( pageParams.getFrom(), pageParams.getSize(), + request.getIndexNames(), + request.getConnectorNames(), listener.map(r -> new ListConnectorAction.Response(r.connectors(), r.totalResults())) ); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index be3bb8be7b200..52bfd64db1844 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -534,11 +534,12 @@ public void onFailure(Exception e) { return resp.get(); } - private ConnectorIndexService.ConnectorResult awaitListConnector(int from, int size) throws Exception { + private ConnectorIndexService.ConnectorResult awaitListConnector(int from, int size, List indexNames, List names) + throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); - connectorIndexService.listConnectors(from, size, new ActionListener<>() { + connectorIndexService.listConnectors(from, size, indexNames, names, new ActionListener<>() { @Override public void onResponse(ConnectorIndexService.ConnectorResult result) { resp.set(result); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java index b31c3e90b7403..3d2192098d907 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.core.ml.AbstractBWCSerializationTestCase; import java.io.IOException; +import java.util.List; public class ListConnectorActionRequestBWCSerializingTests extends AbstractBWCSerializationTestCase { @Override @@ -25,7 +26,11 @@ protected Writeable.Reader instanceReader() { @Override protected ListConnectorAction.Request createTestInstance() { PageParams pageParams = SearchApplicationTestUtils.randomPageParams(); - return new ListConnectorAction.Request(pageParams); + return new ListConnectorAction.Request( + pageParams, + 
List.of(generateRandomStringArray(10, 10, false)), + List.of(generateRandomStringArray(10, 10, false)) + ); } @Override @@ -40,6 +45,6 @@ protected ListConnectorAction.Request doParseInstance(XContentParser parser) thr @Override protected ListConnectorAction.Request mutateInstanceForVersion(ListConnectorAction.Request instance, TransportVersion version) { - return new ListConnectorAction.Request(instance.getPageParams()); + return new ListConnectorAction.Request(instance.getPageParams(), instance.getIndexNames(), instance.getConnectorNames()); } } From 6ae521bf12612f2df9e0d4a45eab6bcdd7dae06c Mon Sep 17 00:00:00 2001 From: David Kilfoyle <41695641+kilfoyle@users.noreply.github.com> Date: Mon, 5 Feb 2024 10:06:41 -0500 Subject: [PATCH 023/106] [DOCS] Small fixes for the 'Installing Elasticsearch' page (#105034) * [DOCS] Add link to on-prem install tutorial * Move link to bottom of packages section * Rearrange things according to suggestions * Add another link on the 'Install Elasticsearch with RPM' page --- docs/reference/setup/install.asciidoc | 16 ++++++++++++---- docs/reference/setup/install/rpm.asciidoc | 2 ++ 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc index 858902bb72ef2..49501c46b8ba9 100644 --- a/docs/reference/setup/install.asciidoc +++ b/docs/reference/setup/install.asciidoc @@ -16,8 +16,8 @@ To set up Elasticsearch in {ecloud}, sign up for a {ess-trial}[free {ecloud} tri If you want to install and manage {es} yourself, you can: -* Run {es} on any Linux, MacOS, or Windows machine. -* Run {es} in a <>. +* Run {es} using a <>. +* Run {es} in a <>. * Set up and manage {es}, {kib}, {agent}, and the rest of the Elastic Stack on Kubernetes with {eck-ref}[{eck}]. TIP: To try out Elasticsearch on your own machine, we recommend using Docker and running both Elasticsearch and Kibana. For more information, see <>. @@ -57,10 +57,18 @@ Elasticsearch website or from our RPM repository. + <> +TIP: For a step-by-step example of setting up the {stack} on your own premises, try out our tutorial: {stack-ref}/installing-stack-demo-self.html[Installing a self-managed Elastic Stack]. + +[discrete] +[[elasticsearch-docker-images]] +=== Elasticsearch container images + +You can also run {es} inside a container image. + +[horizontal] `docker`:: -Images are available for running Elasticsearch as Docker containers. They may be -downloaded from the Elastic Docker Registry. +Docker container images may be downloaded from the Elastic Docker Registry. + {ref}/docker.html[Install {es} with Docker] diff --git a/docs/reference/setup/install/rpm.asciidoc b/docs/reference/setup/install/rpm.asciidoc index 8dfbca8c63210..a30c8c313b263 100644 --- a/docs/reference/setup/install/rpm.asciidoc +++ b/docs/reference/setup/install/rpm.asciidoc @@ -19,6 +19,8 @@ NOTE: Elasticsearch includes a bundled version of https://openjdk.java.net[OpenJ from the JDK maintainers (GPLv2+CE). To use your own version of Java, see the <> +TIP: For a step-by-step example of setting up the {stack} on your own premises, try out our tutorial: {stack-ref}/installing-stack-demo-self.html[Installing a self-managed Elastic Stack]. 
+ [[rpm-key]] ==== Import the Elasticsearch GPG Key From 86c1fa2a6c4c1f883b1c7d9d4b6ad1de44826222 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 5 Feb 2024 07:16:25 -0800 Subject: [PATCH 024/106] Avoid converting to string when parsing responses in heap attack tests (#105109) We've seen cases of OOM errors in the test runner process, which occur when we convert a response to a JSON string and then parse it. We can directly parse from its input stream to avoid these OOM errors. --- .../xpack/esql/heap_attack/HeapAttackIT.java | 42 ++++++++----------- 1 file changed, 18 insertions(+), 24 deletions(-) diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 8d4b5ece98993..4e6e149b454e8 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.heap_attack; -import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.client.config.RequestConfig; import org.apache.http.util.EntityUtils; @@ -21,7 +20,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ListMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -31,7 +29,6 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import org.junit.After; import org.junit.Before; @@ -87,7 +84,7 @@ public void skipOnAborted() { public void testSortByManyLongsSuccess() throws IOException { initManyLongs(); Response response = sortByManyLongs(2000); - Map map = XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false); + Map map = responseAsMap(response); ListMatcher columns = matchesList().item(matchesMap().entry("name", "a").entry("type", "long")) .item(matchesMap().entry("name", "b").entry("type", "long")); ListMatcher values = matchesList(); @@ -109,7 +106,7 @@ public void testSortByManyLongsTooMuchMemory() throws IOException { private void assertCircuitBreaks(ThrowingRunnable r) throws IOException { ResponseException e = expectThrows(ResponseException.class, r); - Map map = XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(e.getResponse().getEntity()), false); + Map map = responseAsMap(e.getResponse()); logger.info("expected circuit breaker {}", map); assertMap( map, @@ -133,11 +130,8 @@ private Response sortByManyLongs(int count) throws IOException { */ public void testGroupOnSomeLongs() throws IOException { initManyLongs(); - Map map = XContentHelper.convertToMap( - JsonXContent.jsonXContent, - EntityUtils.toString(groupOnManyLongs(200).getEntity()), - false - ); + Response resp = groupOnManyLongs(200); + Map map = responseAsMap(resp); ListMatcher columns = matchesList().item(matchesMap().entry("name", "MAX(a)").entry("type", "long")); 
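// A brief aside on the pattern above (a sketch, not part of this test): responseAsMap hands the
// entity's InputStream straight to the XContent parser, so the raw JSON is never buffered as one
// big String. The replaced EntityUtils.toString(...) calls materialized the whole body first,
// which is what pushed the test runner over the heap limit on large responses. Conceptually:
//
//   try (InputStream body = response.getEntity().getContent()) {
//       // the stream is consumed incrementally; only the parsed Map is retained
//       Map<String, Object> map = XContentHelper.convertToMap(JsonXContent.jsonXContent, body, false);
//   }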
ListMatcher values = matchesList().item(List.of(9)); assertMap(map, matchesMap().entry("columns", columns).entry("values", values)); @@ -148,11 +142,8 @@ public void testGroupOnSomeLongs() throws IOException { */ public void testGroupOnManyLongs() throws IOException { initManyLongs(); - Map map = XContentHelper.convertToMap( - JsonXContent.jsonXContent, - EntityUtils.toString(groupOnManyLongs(5000).getEntity()), - false - ); + Response resp = groupOnManyLongs(5000); + Map map = responseAsMap(resp); ListMatcher columns = matchesList().item(matchesMap().entry("name", "MAX(a)").entry("type", "long")); ListMatcher values = matchesList().item(List.of(9)); assertMap(map, matchesMap().entry("columns", columns).entry("values", values)); @@ -180,7 +171,8 @@ private StringBuilder makeManyLongs(int count) { public void testSmallConcat() throws IOException { initSingleDocIndex(); - Map map = XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(concat(2).getEntity()), false); + Response resp = concat(2); + Map map = responseAsMap(resp); ListMatcher columns = matchesList().item(matchesMap().entry("name", "a").entry("type", "long")) .item(matchesMap().entry("name", "str").entry("type", "keyword")); ListMatcher values = matchesList().item(List.of(1, "1".repeat(100))); @@ -190,7 +182,7 @@ public void testSmallConcat() throws IOException { public void testHugeConcat() throws IOException { initSingleDocIndex(); ResponseException e = expectThrows(ResponseException.class, () -> concat(10)); - Map map = XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(e.getResponse().getEntity()), false); + Map map = responseAsMap(e.getResponse()); logger.info("expected request rejected {}", map); assertMap( map, @@ -216,7 +208,8 @@ private Response concat(int evals) throws IOException { */ public void testManyConcat() throws IOException { initManyLongs(); - Map map = XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(manyConcat(300).getEntity()), false); + Response resp = manyConcat(300); + Map map = responseAsMap(resp); ListMatcher columns = matchesList(); for (int s = 0; s < 300; s++) { columns = columns.item(matchesMap().entry("name", "str" + s).entry("type", "keyword")); @@ -267,7 +260,8 @@ private Response manyConcat(int strings) throws IOException { public void testManyEval() throws IOException { initManyLongs(); - Map map = XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(manyEval(1).getEntity()), false); + Response resp = manyEval(1); + Map map = responseAsMap(resp); ListMatcher columns = matchesList(); columns = columns.item(matchesMap().entry("name", "a").entry("type", "long")); columns = columns.item(matchesMap().entry("name", "b").entry("type", "long")); @@ -369,7 +363,7 @@ public void testFetchTooManyBigFields() throws IOException { */ private void fetchManyBigFields(int docs) throws IOException { Response response = query("{\"query\": \"FROM manybigfields | SORT f000 | LIMIT " + docs + "\"}", "columns"); - Map map = XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false); + Map map = responseAsMap(response); ListMatcher columns = matchesList(); for (int f = 0; f < 1000; f++) { columns = columns.item(matchesMap().entry("name", "f" + String.format(Locale.ROOT, "%03d", f)).entry("type", "keyword")); @@ -381,7 +375,7 @@ public void testAggMvLongs() throws IOException { int fieldValues = 100; initMvLongsIndex(1, 3, fieldValues); Response response = aggMvLongs(3); - Map 
map = XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false); + Map map = responseAsMap(response); ListMatcher columns = matchesList().item(matchesMap().entry("name", "MAX(f00)").entry("type", "long")) .item(matchesMap().entry("name", "f00").entry("type", "long")) .item(matchesMap().entry("name", "f01").entry("type", "long")) @@ -406,7 +400,7 @@ public void testFetchMvLongs() throws IOException { int fields = 100; initMvLongsIndex(100, fields, 1000); Response response = fetchMvLongs(); - Map map = XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false); + Map map = responseAsMap(response); ListMatcher columns = matchesList(); for (int f = 0; f < fields; f++) { columns = columns.item(matchesMap().entry("name", String.format(Locale.ROOT, "f%02d", f)).entry("type", "long")); @@ -570,8 +564,8 @@ public void assertRequestBreakerEmpty() throws Exception { return; } assertBusy(() -> { - HttpEntity entity = adminClient().performRequest(new Request("GET", "/_nodes/stats")).getEntity(); - Map stats = XContentHelper.convertToMap(XContentType.JSON.xContent(), entity.getContent(), false); + Response response = adminClient().performRequest(new Request("GET", "/_nodes/stats")); + Map stats = responseAsMap(response); Map nodes = (Map) stats.get("nodes"); for (Object n : nodes.values()) { Map node = (Map) n; From 617dad5d36ec1233ffb3870e335d3d41e1bb7f61 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Mon, 5 Feb 2024 09:32:52 -0600 Subject: [PATCH 025/106] Reducing the memory usage of the new IndexRequestBuilder (#105091) --- .../action/index/IndexRequestBuilder.java | 115 +++++------------- .../index/IndexRequestBuilderTests.java | 17 --- 2 files changed, 31 insertions(+), 101 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java index 3b6d07d200e29..7e39bf5875686 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java @@ -8,18 +8,23 @@ package org.elasticsearch.action.index; +import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.WriteRequestBuilder; import org.elasticsearch.action.support.replication.ReplicationRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; +import org.elasticsearch.client.internal.Requests; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.VersionType; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import java.io.IOException; import java.util.Map; /** @@ -29,21 +34,8 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder { private String id = null; - /* - * The following variables hold information about the source of the request. Only one of sourceMap, sourceArray, sourceString, - * sourceBytesReference, or sourceBytes can actually be used. When request() is called it makes sure that only one is set. 
- */ - private Map sourceMap; - private Object[] sourceArray; - private XContentBuilder sourceXContentBuilder; - private String sourceString; + private BytesReference sourceBytesReference; - private byte[] sourceBytes; - // Optionally used with sourceBytes: - private Integer sourceOffset; - // Optionally used with sourceBytes: - private Integer sourceLength; - // Optionally used with sourceMap, sourceArray, sourceString, sourceBytesReference, or sourceBytes: private XContentType sourceContentType; private String pipeline; @@ -101,8 +93,7 @@ public IndexRequestBuilder setSource(BytesReference source, XContentType xConten * @param source The map to index */ public IndexRequestBuilder setSource(Map source) { - this.sourceMap = source; - return this; + return setSource(source, Requests.INDEX_CONTENT_TYPE); } /** @@ -111,9 +102,13 @@ public IndexRequestBuilder setSource(Map source) { * @param source The map to index */ public IndexRequestBuilder setSource(Map source, XContentType contentType) { - this.sourceMap = source; - this.sourceContentType = contentType; - return this; + try { + XContentBuilder builder = XContentFactory.contentBuilder(contentType); + builder.map(source); + return setSource(builder); + } catch (IOException e) { + throw new ElasticsearchGenerationException("Failed to generate", e); + } } /** @@ -123,7 +118,7 @@ public IndexRequestBuilder setSource(Map source, XContentType content * or using the {@link #setSource(byte[], XContentType)}. */ public IndexRequestBuilder setSource(String source, XContentType xContentType) { - this.sourceString = source; + this.sourceBytesReference = new BytesArray(source); this.sourceContentType = xContentType; return this; } @@ -132,7 +127,8 @@ public IndexRequestBuilder setSource(String source, XContentType xContentType) { * Sets the content source to index. */ public IndexRequestBuilder setSource(XContentBuilder sourceBuilder) { - this.sourceXContentBuilder = sourceBuilder; + this.sourceBytesReference = BytesReference.bytes(sourceBuilder); + this.sourceContentType = sourceBuilder.contentType(); return this; } @@ -140,9 +136,7 @@ public IndexRequestBuilder setSource(XContentBuilder sourceBuilder) { * Sets the document to index in bytes form. */ public IndexRequestBuilder setSource(byte[] source, XContentType xContentType) { - this.sourceBytes = source; - this.sourceContentType = xContentType; - return this; + return setSource(source, 0, source.length, xContentType); } /** @@ -155,9 +149,7 @@ public IndexRequestBuilder setSource(byte[] source, XContentType xContentType) { * @param xContentType The type/format of the source */ public IndexRequestBuilder setSource(byte[] source, int offset, int length, XContentType xContentType) { - this.sourceBytes = source; - this.sourceOffset = offset; - this.sourceLength = length; + this.sourceBytesReference = new BytesArray(source, offset, length); this.sourceContentType = xContentType; return this; } @@ -171,11 +163,7 @@ public IndexRequestBuilder setSource(byte[] source, int offset, int length, XCon *

*/ public IndexRequestBuilder setSource(Object... source) { - if (source.length % 2 != 0) { - throw new IllegalArgumentException("The number of object passed must be even but was [" + source.length + "]"); - } - this.sourceArray = source; - return this; + return setSource(Requests.INDEX_CONTENT_TYPE, source); } /** @@ -190,9 +178,17 @@ public IndexRequestBuilder setSource(XContentType xContentType, Object... source if (source.length % 2 != 0) { throw new IllegalArgumentException("The number of object passed must be even but was [" + source.length + "]"); } - this.sourceArray = source; - this.sourceContentType = xContentType; - return this; + try { + XContentBuilder builder = XContentFactory.contentBuilder(xContentType); + builder.startObject(); + for (int i = 0; i < source.length; i++) { + builder.field(source[i++].toString(), source[i]); + } + builder.endObject(); + return setSource(builder); + } catch (IOException e) { + throw new ElasticsearchGenerationException("Failed to generate", e); + } } /** @@ -292,32 +288,9 @@ public IndexRequest request() { IndexRequest request = new IndexRequest(); super.apply(request); request.id(id); - if (sourceMap != null && sourceContentType != null) { - request.source(sourceMap, sourceContentType); - } else if (sourceMap != null) { - request.source(sourceMap); - } - if (sourceArray != null && sourceContentType != null) { - request.source(sourceContentType, sourceArray); - } else if (sourceArray != null) { - request.source(sourceArray); - } - if (sourceXContentBuilder != null) { - request.source(sourceXContentBuilder); - } - if (sourceString != null && sourceContentType != null) { - request.source(sourceString, sourceContentType); - } if (sourceBytesReference != null && sourceContentType != null) { request.source(sourceBytesReference, sourceContentType); } - if (sourceBytes != null && sourceContentType != null) { - if (sourceOffset != null && sourceLength != null) { - request.source(sourceBytes, sourceOffset, sourceLength, sourceContentType); - } else { - request.source(sourceBytes, sourceContentType); - } - } if (pipeline != null) { request.setPipeline(pipeline); } @@ -359,30 +332,4 @@ public IndexRequest request() { } return request; } - - @Override - protected void validate() throws IllegalStateException { - super.validate(); - int sourceFieldsSet = countSourceFieldsSet(); - if (sourceFieldsSet > 1) { - throw new IllegalStateException("Only one setSource() method may be called, but " + sourceFieldsSet + " have been"); - } - } - - /* - * Returns the number of the source fields that are non-null (ideally this will be 1). - */ - private int countSourceFieldsSet() { - return countNonNullObjects(sourceMap, sourceArray, sourceXContentBuilder, sourceString, sourceBytesReference, sourceBytes); - } - - private int countNonNullObjects(Object... 
objects) { - int sum = 0; - for (Object object : objects) { - if (object != null) { - sum++; - } - } - return sum; - } } diff --git a/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java b/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java index 778bd6a1d138e..9af522524abc9 100644 --- a/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/action/index/IndexRequestBuilderTests.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.index; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.TestThreadPool; @@ -83,20 +82,4 @@ public void testSetSource() throws Exception { indexRequestBuilder.setSource(doc); assertEquals(EXPECTED_SOURCE, XContentHelper.convertToJson(indexRequestBuilder.request().source(), true)); } - - public void testValidation() { - IndexRequestBuilder indexRequestBuilder = new IndexRequestBuilder(this.testClient); - Map source = new HashMap<>(); - source.put("SomeKey", "SomeValue"); - indexRequestBuilder.setSource(source); - assertNotNull(indexRequestBuilder.request()); - indexRequestBuilder.setSource("SomeKey", "SomeValue"); - expectThrows(IllegalStateException.class, indexRequestBuilder::request); - - indexRequestBuilder = new IndexRequestBuilder(this.testClient); - indexRequestBuilder.setTimeout(randomTimeValue()); - assertNotNull(indexRequestBuilder.request()); - indexRequestBuilder.setTimeout(TimeValue.timeValueSeconds(randomIntBetween(1, 30))); - expectThrows(IllegalStateException.class, indexRequestBuilder::request); - } } From 39eefb3197b4f88b1675c0c5253343d011285bb6 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 5 Feb 2024 17:20:30 +0100 Subject: [PATCH 026/106] Unmute TimeSeriesTsidHashCardinalityIT (#105121) and reduce the number of time series in order to fix a test-related OOME. 
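The unmuted test's iterator wrapper is also condensed into a record (see the diff below). A minimal, compilable sketch of that shape of refactor, using generic placeholder names rather than the test's exact types:

    import java.util.Iterator;
    import java.util.Map;

    // A one-field wrapper class over Map.Entry values becomes a record: the field,
    // constructor, accessor, equals/hashCode and toString are all generated for free.
    record EntryValueIterator<K, V>(Iterator<Map.Entry<K, V>> it) implements Iterator<V> {
        @Override
        public boolean hasNext() {
            return it.hasNext();
        }

        @Override
        public V next() {
            return it.next().getValue();
        }
    }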
Relates to #105104 --- .../bucket/TimeSeriesTsidHashCardinalityIT.java | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesTsidHashCardinalityIT.java b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesTsidHashCardinalityIT.java index 278f905e3900b..97c75689fe5dc 100644 --- a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesTsidHashCardinalityIT.java +++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesTsidHashCardinalityIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.aggregations.bucket; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -44,7 +43,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105104") public class TimeSeriesTsidHashCardinalityIT extends ESSingleNodeTestCase { private static final String START_TIME = "2021-01-01T00:00:00Z"; private static final String END_TIME = "2021-12-31T23:59:59Z"; @@ -72,7 +70,7 @@ public void setUp() throws Exception { afterIndex = randomAlphaOfLength(12).toLowerCase(Locale.ROOT); startTime = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(START_TIME); endTime = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(END_TIME); - numTimeSeries = 5_000; + numTimeSeries = 500; // NOTE: we need to use few dimensions to be able to index documents in an index created before introducing TSID hashing numDimensions = randomIntBetween(10, 20); @@ -277,20 +275,14 @@ public String toString() { @Override public Iterator iterator() { - return new TimeSeriesIterator(this.dataset.entrySet()); + return new TimeSeriesIterator(this.dataset.entrySet().iterator()); } public int size() { return this.dataset.size(); } - static class TimeSeriesIterator implements Iterator { - - private final Iterator> it; - - TimeSeriesIterator(final Set> entries) { - this.it = entries.iterator(); - } + record TimeSeriesIterator(Iterator> it) implements Iterator { @Override public boolean hasNext() { From 2932500ce232598043604d2d8a41ee76ccd1df34 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Mon, 5 Feb 2024 11:57:20 -0500 Subject: [PATCH 027/106] [Transform] return results in order (#105089) * Transform: return results in order Currently, when Transform searches over aggregations, it stores the results in an unordered HashMap. This potentially rearranges search results. For example, if a user specifies an order in a search request, the search response is in that order. But if the search request is embedded in a Transform request, then the Transform response will not preserve the order and the result will look different. With this change, Transform will always preserve the order of the search response. A search embedded in a Transform should behave as an unembedded search. 
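A minimal standalone demonstration of the difference (a demo class, not Transform code; the user names mirror the updated REST test below):

    import java.util.HashMap;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class InsertionOrderDemo {
        public static void main(String[] args) {
            // Keys arrive already ordered by the search, e.g. a terms agg sorted desc by key.
            String[] keys = { "user_9", "user_8", "user_7" };
            Map<String, Integer> unordered = new HashMap<>();
            Map<String, Integer> ordered = new LinkedHashMap<>();
            for (String key : keys) {
                unordered.put(key, 1);
                ordered.put(key, 1);
            }
            // HashMap iterates in hash-bucket order, which need not match insertion order.
            System.out.println(unordered.keySet());
            // LinkedHashMap always iterates in insertion order: [user_9, user_8, user_7]
            System.out.println(ordered.keySet());
        }
    }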
Closes #104847 --- docs/changelog/105089.yaml | 6 + .../test/rest/ESRestTestCase.java | 22 ++- .../integration/TransformPivotRestIT.java | 8 +- .../pivot/AggregationResultUtils.java | 51 +++---- .../pivot/AggregationResultUtilsTests.java | 142 ++++++++++++------ 5 files changed, 148 insertions(+), 81 deletions(-) create mode 100644 docs/changelog/105089.yaml diff --git a/docs/changelog/105089.yaml b/docs/changelog/105089.yaml new file mode 100644 index 0000000000000..6f43c58af8a41 --- /dev/null +++ b/docs/changelog/105089.yaml @@ -0,0 +1,6 @@ +pr: 105089 +summary: Return results in order +area: Transform +type: bug +issues: + - 104847 diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index a3427b3778b0a..1860283515c9d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1899,20 +1899,36 @@ protected static Map getAlias(final String index, final String a } protected static Map getAsMap(final String endpoint) throws IOException { - return getAsMap(client(), endpoint); + return getAsMap(client(), endpoint, false); + } + + protected static Map getAsOrderedMap(final String endpoint) throws IOException { + return getAsMap(client(), endpoint, true); } protected static Map getAsMap(RestClient client, final String endpoint) throws IOException { + return getAsMap(client, endpoint, false); + } + + private static Map getAsMap(RestClient client, final String endpoint, final boolean ordered) throws IOException { Response response = client.performRequest(new Request("GET", endpoint)); - return responseAsMap(response); + return responseAsMap(response, ordered); } protected static Map responseAsMap(Response response) throws IOException { + return responseAsMap(response, false); + } + + protected static Map responseAsOrderedMap(Response response) throws IOException { + return responseAsMap(response, true); + } + + private static Map responseAsMap(Response response, boolean ordered) throws IOException { XContentType entityContentType = XContentType.fromMediaType(response.getEntity().getContentType().getValue()); Map responseEntity = XContentHelper.convertToMap( entityContentType.xContent(), response.getEntity().getContent(), - false + ordered ); assertNotNull(responseEntity); return responseEntity; diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java index 6aeca79b4aa17..50b078730063d 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformPivotRestIT.java @@ -28,6 +28,7 @@ import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInRelativeOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @@ -920,7 +921,7 @@ public void testPivotWithTermsAgg() throws Exception { 
assertEquals(3, XContentMapValues.extractValue("_all.total.docs.count", indexStats)); // get and check some term results - Map searchResult = getAsMap(transformIndex + "/_search?q=every_2:2.0"); + Map searchResult = getAsOrderedMap(transformIndex + "/_search?q=every_2:2.0"); assertEquals(1, XContentMapValues.extractValue("hits.total.value", searchResult)); Map commonUsers = (Map) ((List) XContentMapValues.extractValue( @@ -944,9 +945,8 @@ public void testPivotWithTermsAgg() throws Exception { searchResult )).get(0); assertThat(commonUsersDesc, is(not(nullValue()))); - // 3 user names latest in lexicographic order (user_7, user_8, user_9) are selected properly but their order is not preserved. - // See https://github.com/elastic/elasticsearch/issues/104847 for more information. - assertThat(commonUsersDesc, equalTo(Map.of("user_7", 6, "user_9", 2, "user_8", 8))); + // 3 user names latest in lexicographic order (user_9, user_8, user_7) are selected properly and their order is preserved. + assertThat(commonUsersDesc.keySet(), containsInRelativeOrder("user_9", "user_8", "user_7")); Map rareUsers = (Map) ((List) XContentMapValues.extractValue( "hits.hits._source.rare_users", searchResult diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtils.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtils.java index 1c6c411020d49..a851e4a47f1cc 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtils.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtils.java @@ -10,6 +10,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Numbers; import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.search.aggregations.Aggregation; @@ -38,10 +39,10 @@ import org.elasticsearch.xpack.transform.transforms.IDGenerator; import org.elasticsearch.xpack.transform.utils.OutputFieldNameConverter; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -106,7 +107,7 @@ public static Stream> extractCompositeAggregationResults( progress.incrementDocsProcessed(bucket.getDocCount()); progress.incrementDocsIndexed(1L); - Map document = new HashMap<>(); + Map document = new LinkedHashMap<>(); // generator to create unique but deterministic document ids, so we // - do not create duplicates if we re-run after failure // - update documents @@ -223,7 +224,7 @@ static void updateDocument(Map document, String fieldName, Objec throw new AggregationExtractionException("mixed object types of nested and non-nested fields [{}]", fieldName); } } else { - Map newMap = new HashMap<>(); + Map newMap = new LinkedHashMap<>(); internalMap.put(token, newMap); internalMap = newMap; } @@ -284,7 +285,7 @@ static class MultiValueAggExtractor implements AggValueExtractor { @Override public Object value(Aggregation agg, Map fieldTypeMap, String lookupFieldPrefix) { MultiValueAggregation aggregation = (MultiValueAggregation) agg; - Map extracted = new HashMap<>(); + Map extracted = new LinkedHashMap<>(); for (String valueName : 
aggregation.valueNames()) { List valueAsStrings = aggregation.getValuesAsStrings(valueName); @@ -302,7 +303,7 @@ static class NumericMultiValueAggExtractor implements AggValueExtractor { @Override public Object value(Aggregation agg, Map fieldTypeMap, String lookupFieldPrefix) { MultiValue aggregation = (MultiValue) agg; - Map extracted = new HashMap<>(); + Map extracted = new LinkedHashMap<>(); String fieldLookupPrefix = (lookupFieldPrefix.isEmpty() ? agg.getName() : lookupFieldPrefix + "." + agg.getName()) + "."; for (String valueName : aggregation.valueNames()) { @@ -322,7 +323,7 @@ static class PercentilesAggExtractor implements AggValueExtractor { @Override public Object value(Aggregation agg, Map fieldTypeMap, String lookupFieldPrefix) { Percentiles aggregation = (Percentiles) agg; - HashMap percentiles = new HashMap<>(); + Map percentiles = new LinkedHashMap<>(); for (Percentile p : aggregation) { // in case of sparse data percentiles might not have data, in this case it returns NaN, @@ -360,16 +361,10 @@ public Object value(Aggregation agg, Map fieldTypeMap, String lo return aggregation.getDocCount(); } - HashMap nested = new HashMap<>(); + var subAggLookupFieldPrefix = lookupFieldPrefix.isEmpty() ? agg.getName() : lookupFieldPrefix + "." + agg.getName(); + Map nested = new LinkedHashMap<>(); for (Aggregation subAgg : aggregation.getAggregations()) { - nested.put( - subAgg.getName(), - getExtractor(subAgg).value( - subAgg, - fieldTypeMap, - lookupFieldPrefix.isEmpty() ? agg.getName() : lookupFieldPrefix + "." + agg.getName() - ) - ); + nested.put(subAgg.getName(), getExtractor(subAgg).value(subAgg, fieldTypeMap, subAggLookupFieldPrefix)); } return nested; @@ -392,23 +387,17 @@ static class MultiBucketsAggExtractor implements AggValueExtractor { public Object value(Aggregation agg, Map fieldTypeMap, String lookupFieldPrefix) { MultiBucketsAggregation aggregation = (MultiBucketsAggregation) agg; - HashMap nested = new HashMap<>(); + var subAggLookupFieldPrefix = lookupFieldPrefix.isEmpty() ? agg.getName() : lookupFieldPrefix + "." + agg.getName(); + Map nested = Maps.newLinkedHashMapWithExpectedSize(aggregation.getBuckets().size()); for (MultiBucketsAggregation.Bucket bucket : aggregation.getBuckets()) { String bucketKey = bucketKeyTransfomer.apply(bucket.getKeyAsString()); if (bucket.getAggregations().iterator().hasNext() == false) { nested.put(bucketKey, bucket.getDocCount()); } else { - HashMap nestedBucketObject = new HashMap<>(); + Map nestedBucketObject = new LinkedHashMap<>(); for (Aggregation subAgg : bucket.getAggregations()) { - nestedBucketObject.put( - subAgg.getName(), - getExtractor(subAgg).value( - subAgg, - fieldTypeMap, - lookupFieldPrefix.isEmpty() ? agg.getName() : lookupFieldPrefix + "." 
+ agg.getName() - ) - ); + nestedBucketObject.put(subAgg.getName(), getExtractor(subAgg).value(subAgg, fieldTypeMap, subAggLookupFieldPrefix)); } nested.put(bucketKey, nestedBucketObject); } @@ -441,18 +430,18 @@ public Object value(Aggregation agg, Map fieldTypeMap, String lo if (aggregation.bottomRight() == null || aggregation.topLeft() == null) { return null; } - final Map geoShape = new HashMap<>(); + final Map geoShape = new LinkedHashMap<>(); // If the two geo_points are equal, it is a point if (aggregation.topLeft().equals(aggregation.bottomRight())) { geoShape.put(FIELD_TYPE, POINT); - geoShape.put(FIELD_COORDINATES, Arrays.asList(aggregation.topLeft().getLon(), aggregation.bottomRight().getLat())); + geoShape.put(FIELD_COORDINATES, List.of(aggregation.topLeft().getLon(), aggregation.bottomRight().getLat())); // If only the lat or the lon of the two geo_points are equal, than we know it should be a line } else if (Double.compare(aggregation.topLeft().getLat(), aggregation.bottomRight().getLat()) == 0 || Double.compare(aggregation.topLeft().getLon(), aggregation.bottomRight().getLon()) == 0) { geoShape.put(FIELD_TYPE, LINESTRING); geoShape.put( FIELD_COORDINATES, - Arrays.asList( + List.of( new Double[] { aggregation.topLeft().getLon(), aggregation.topLeft().getLat() }, new Double[] { aggregation.bottomRight().getLon(), aggregation.bottomRight().getLat() } ) @@ -465,7 +454,7 @@ public Object value(Aggregation agg, Map fieldTypeMap, String lo geoShape.put( FIELD_COORDINATES, Collections.singletonList( - Arrays.asList( + List.of( new Double[] { tl.getLon(), tl.getLat() }, new Double[] { br.getLon(), tl.getLat() }, new Double[] { br.getLon(), br.getLat() }, @@ -495,12 +484,12 @@ static class GeoTileBucketKeyExtractor implements BucketKeyExtractor { public Object value(Object key, String type) { assert key instanceof String; Rectangle rectangle = GeoTileUtils.toBoundingBox(key.toString()); - final Map geoShape = new HashMap<>(); + final Map geoShape = Maps.newLinkedHashMapWithExpectedSize(2); geoShape.put(FIELD_TYPE, POLYGON); geoShape.put( FIELD_COORDINATES, Collections.singletonList( - Arrays.asList( + List.of( new Double[] { rectangle.getMaxLon(), rectangle.getMinLat() }, new Double[] { rectangle.getMinLon(), rectangle.getMinLat() }, new Double[] { rectangle.getMinLon(), rectangle.getMaxLat() }, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java index 8d0bd4f9d8019..37e80f7459b2b 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java @@ -41,13 +41,18 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformProgress; import org.elasticsearch.xpack.core.transform.transforms.pivot.GroupConfig; import org.elasticsearch.xpack.transform.transforms.pivot.AggregationResultUtils.BucketKeyExtractor; +import org.hamcrest.BaseMatcher; +import org.hamcrest.Description; +import org.hamcrest.Matcher; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Supplier; import java.util.stream.Collectors; import static 
org.hamcrest.CoreMatchers.equalTo; @@ -75,7 +80,7 @@ public String getWriteableName() { @Override public List getValuesAsStrings(String name) { - return List.of(values.get(name).toString()); + return List.of(values.get(name)); } @Override @@ -184,7 +189,7 @@ public void testExtractCompositeAggregationResults() throws IOException { asMap(targetField, "ID2", aggName, 28.99), asMap(targetField, "ID3", aggName, null) ); - Map fieldTypeMap = asStringMap(targetField, "keyword", aggName, "double"); + Map fieldTypeMap = Map.of(targetField, "keyword", aggName, "double"); executeTest(groupBy, aggregationBuilders, List.of(), input, fieldTypeMap, expected, 11); } @@ -207,7 +212,6 @@ public void testExtractCompositeAggregationResultsMultipleGroups() throws IOExce }""", targetField, targetField2)); String aggName = randomAlphaOfLengthBetween(5, 10); - String aggTypedName = "avg#" + aggName; List aggregationBuilders = List.of(AggregationBuilders.avg(aggName)); InternalComposite input = createComposite( @@ -241,7 +245,7 @@ public void testExtractCompositeAggregationResultsMultipleGroups() throws IOExce asMap(targetField, "ID2", targetField2, "ID1_2", aggName, 28.99), asMap(targetField, "ID3", targetField2, "ID2_2", aggName, null) ); - Map fieldTypeMap = asStringMap(aggName, "double", targetField, "keyword", targetField2, "keyword"); + Map fieldTypeMap = Map.of(aggName, "double", targetField, "keyword", targetField2, "keyword"); executeTest(groupBy, aggregationBuilders, List.of(), input, fieldTypeMap, expected, 6); } @@ -288,7 +292,7 @@ public void testExtractCompositeAggregationResultsMultiAggregations() throws IOE asMap(targetField, "ID2", aggName, 28.99, aggName2, 222.33), asMap(targetField, "ID3", aggName, 12.55, aggName2, null) ); - Map fieldTypeMap = asStringMap(targetField, "keyword", aggName, "double", aggName2, "double"); + Map fieldTypeMap = Map.of(targetField, "keyword", aggName, "double", aggName2, "double"); executeTest(groupBy, aggregationBuilders, List.of(), input, fieldTypeMap, expected, 200); } @@ -353,7 +357,7 @@ public void testExtractCompositeAggregationResultsMultiAggregationsAndTypes() th asMap(targetField, "ID2", targetField2, "ID1_2", aggName, 28.99, aggName2, "-2.44F"), asMap(targetField, "ID3", targetField2, "ID2_2", aggName, 12.55, aggName2, null) ); - Map fieldTypeMap = asStringMap( + Map fieldTypeMap = Map.of( aggName, "double", aggName2, @@ -419,7 +423,7 @@ public void testExtractCompositeAggregationResultsWithDynamicType() throws IOExc asMap(targetField, "ID2", targetField2, "ID1_2", aggName, asMap("field", 2.13)), asMap(targetField, "ID3", targetField2, "ID2_2", aggName, null) ); - Map fieldTypeMap = asStringMap(targetField, "keyword", targetField2, "keyword"); + Map fieldTypeMap = Map.of(targetField, "keyword", targetField2, "keyword"); executeTest(groupBy, aggregationBuilders, List.of(), input, fieldTypeMap, expected, 6); } @@ -488,7 +492,7 @@ public void testExtractCompositeAggregationResultsWithPipelineAggregation() thro asMap(targetField, "ID2", targetField2, "ID1_2", aggName, 2.13, pipelineAggName, 2.13), asMap(targetField, "ID3", targetField2, "ID2_2", aggName, 12.0, pipelineAggName, null) ); - Map fieldTypeMap = asStringMap(targetField, "keyword", targetField2, "keyword", aggName, "double"); + Map fieldTypeMap = Map.of(targetField, "keyword", targetField2, "keyword", aggName, "double"); executeTest(groupBy, aggregationBuilders, pipelineAggregationBuilders, input, fieldTypeMap, expected, 10); } @@ -566,7 +570,7 @@ public void 
testExtractCompositeAggregationResultsDocIDs() throws IOException { TransformIndexerStats stats = new TransformIndexerStats(); TransformProgress progress = new TransformProgress(); - Map fieldTypeMap = asStringMap(aggName, "double", targetField, "keyword", targetField2, "keyword"); + Map fieldTypeMap = Map.of(aggName, "double", targetField, "keyword", targetField2, "keyword"); List> resultFirstRun = runExtraction( groupBy, @@ -680,24 +684,21 @@ public void testSingleValueAggExtractor() { public void testMultiValueAggExtractor() { Aggregation agg = new TestMultiValueAggregation("mv_metric", Map.of("ip", "192.168.1.1")); - assertThat( AggregationResultUtils.getExtractor(agg).value(agg, Map.of("mv_metric.ip", "ip"), ""), equalTo(Map.of("ip", "192.168.1.1")) ); agg = new TestMultiValueAggregation("mv_metric", Map.of("top_answer", "fortytwo")); - assertThat( AggregationResultUtils.getExtractor(agg).value(agg, Map.of("mv_metric.written_answer", "written_answer"), ""), equalTo(Map.of("top_answer", "fortytwo")) ); - agg = new TestMultiValueAggregation("mv_metric", Map.of("ip", "192.168.1.1", "top_answer", "fortytwo")); - + agg = new TestMultiValueAggregation("mv_metric", asOrderedMap("ip", "192.168.1.1", "top_answer", "fortytwo")); assertThat( AggregationResultUtils.getExtractor(agg).value(agg, Map.of("mv_metric.top_answer", "keyword", "mv_metric.ip", "ip"), ""), - equalTo(Map.of("top_answer", "fortytwo", "ip", "192.168.1.1")) + hasEqualEntriesInOrder(asOrderedMap("ip", "192.168.1.1", "top_answer", "fortytwo")) ); } @@ -715,22 +716,19 @@ public void testNumericMultiValueAggExtractor() { AggregationResultUtils.getExtractor(agg).value(agg, Map.of("mv_metric.exact_answer", "long"), ""), equalTo(Map.of("exact_answer", 42L)) ); - agg = new TestNumericMultiValueAggregation( "mv_metric", - Map.of("approx_answer", Double.valueOf(42.2), "exact_answer", Double.valueOf(42.0)) + asOrderedMap("approx_answer", Double.valueOf(42.2), "exact_answer", Double.valueOf(42.0)) ); - assertThat( AggregationResultUtils.getExtractor(agg) .value(agg, Map.of("mv_metric.approx_answer", "double", "mv_metric.exact_answer", "long"), ""), - equalTo(Map.of("approx_answer", Double.valueOf(42.2), "exact_answer", Long.valueOf(42))) + hasEqualEntriesInOrder(asOrderedMap("approx_answer", Double.valueOf(42.2), "exact_answer", Long.valueOf(42))) ); - assertThat( AggregationResultUtils.getExtractor(agg) .value(agg, Map.of("filter.mv_metric.approx_answer", "double", "filter.mv_metric.exact_answer", "long"), "filter"), - equalTo(Map.of("approx_answer", 42.2, "exact_answer", Long.valueOf(42))) + hasEqualEntriesInOrder(asOrderedMap("approx_answer", 42.2, "exact_answer", Long.valueOf(42))) ); } @@ -791,13 +789,13 @@ public void testGeoBoundsAggExtractor() { String type = "point"; for (int i = 0; i < numberOfRuns; i++) { - Map expectedObject = new HashMap<>(); - expectedObject.put("type", type); double lat = randomDoubleBetween(-90.0, 90.0, false); double lon = randomDoubleBetween(-180.0, 180.0, false); - expectedObject.put("coordinates", List.of(lon, lat)); agg = createGeoBounds(new GeoPoint(lat, lon), new GeoPoint(lat, lon)); - assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), equalTo(expectedObject)); + assertThat( + AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), + hasEqualEntriesInOrder(asOrderedMap("type", type, "coordinates", List.of(lon, lat))) + ); } type = "linestring"; @@ -895,13 +893,16 @@ public void testPercentilesAggExtractor() { ); assertThat( 
AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), - equalTo(asMap("1", 0.0, "50", 22.2, "99", 43.3, "99_5", 100.3)) + hasEqualEntriesInOrder(asOrderedMap("1", 0.0, "50", 22.2, "99", 43.3, "99_5", 100.3)) ); } public void testPercentilesAggExtractorNaN() { Aggregation agg = createPercentilesAgg("p_agg", List.of(new Percentile(1, Double.NaN), new Percentile(50, Double.NaN))); - assertThat(AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), equalTo(asMap("1", null, "50", null))); + assertThat( + AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), + hasEqualEntriesInOrder(asOrderedMap("1", null, "50", null)) + ); } @SuppressWarnings("unchecked") @@ -928,8 +929,8 @@ public void testRangeAggExtractor() { ); assertThat( AggregationResultUtils.getExtractor(agg).value(agg, Map.of(), ""), - equalTo( - asMap( + hasEqualEntriesInOrder( + asOrderedMap( "*-10_5", 10L, "10_5-19_5", @@ -951,6 +952,56 @@ public void testRangeAggExtractor() { ); } + private static Matcher hasEqualEntriesInOrder(Map expected) { + return new BaseMatcher() { + @Override + public boolean matches(Object o) { + if (o instanceof Map) { + return matches((Map) o); + } + return false; + } + + public boolean matches(Map o) { + var expectedEntries = expected.entrySet().iterator(); + var actualEntries = o.entrySet().iterator(); + while (expectedEntries.hasNext() && actualEntries.hasNext()) { + var expectedEntry = expectedEntries.next(); + var actualEntry = actualEntries.next(); + assertThat( + "Entry is out of order. Expected order: " + + mapToString(expected, expectedEntry) + + ", Actual order: " + + mapToString(o, actualEntry), + actualEntry, + equalTo(expectedEntry) + ); + } + return expectedEntries.hasNext() == false && actualEntries.hasNext() == false; + } + + private String mapToString(Map map, Object node) { + return map.entrySet().stream().map(entry -> { + var entryAsString = entry.getKey() + "=" + entry.getValue(); + if (node == entry) { + return "<<" + entryAsString + ">>"; + } + return entryAsString; + }).collect(Collectors.joining(", ", "{", "}")); + } + + @Override + public void describeTo(Description description) { + description.appendText( + expected.entrySet() + .stream() + .map(entry -> entry.getKey() + "=" + entry.getValue()) + .collect(Collectors.joining(", ", "{", "}")) + ); + } + }; + } + public static InternalSingleBucketAggregation createSingleBucketAgg( String name, long docCount, @@ -982,8 +1033,8 @@ public void testSingleBucketAggExtractor() { createSingleMetricAgg("sub2", 33.33, "thirty_three") ); assertThat( - AggregationResultUtils.getExtractor(agg).value(agg, asStringMap("sba2.sub1", "long", "sba2.sub2", "float"), ""), - equalTo(asMap("sub1", 100L, "sub2", 33.33)) + AggregationResultUtils.getExtractor(agg).value(agg, Map.of("sba2.sub1", "long", "sba2.sub2", "float"), ""), + hasEqualEntriesInOrder(asOrderedMap("sub1", 100L, "sub2", 33.33)) ); agg = createSingleBucketAgg( @@ -994,8 +1045,8 @@ public void testSingleBucketAggExtractor() { createSingleBucketAgg("sub3", 42L) ); assertThat( - AggregationResultUtils.getExtractor(agg).value(agg, asStringMap("sba3.sub1", "long", "sba3.sub2", "double"), ""), - equalTo(asMap("sub1", 100L, "sub2", 33.33, "sub3", 42L)) + AggregationResultUtils.getExtractor(agg).value(agg, Map.of("sba3.sub1", "long", "sba3.sub2", "double"), ""), + hasEqualEntriesInOrder(asOrderedMap("sub1", 100L, "sub2", 33.33, "sub3", 42L)) ); agg = createSingleBucketAgg( @@ -1007,8 +1058,8 @@ public void testSingleBucketAggExtractor() { ); assertThat( 
AggregationResultUtils.getExtractor(agg) - .value(agg, asStringMap("sba4.sub3.subsub1", "double", "sba4.sub2", "float", "sba4.sub1", "long"), ""), - equalTo(asMap("sub1", 100L, "sub2", 33.33, "sub3", asMap("subsub1", 11.1))) + .value(agg, Map.of("sba4.sub3.subsub1", "double", "sba4.sub2", "float", "sba4.sub1", "long"), ""), + hasEqualEntriesInOrder(asOrderedMap("sub1", 100L, "sub2", 33.33, "sub3", asMap("subsub1", 11.1))) ); } @@ -1094,22 +1145,27 @@ private GroupConfig parseGroupConfig(String json) throws IOException { } static Map asMap(Object... fields) { + return asMap(HashMap::new, fields); + } + + static Map asOrderedMap(Object... fields) { + return asMap(LinkedHashMap::new, fields); + } + + static Map asMap(Supplier> mapFactory, Object... fields) { assert fields.length % 2 == 0; - final Map map = new HashMap<>(); + var map = mapFactory.get(); for (int i = 0; i < fields.length; i += 2) { - String field = (String) fields[i]; + var field = (String) fields[i]; map.put(field, fields[i + 1]); } return map; } - static Map asStringMap(String... strings) { - assert strings.length % 2 == 0; - final Map map = new HashMap<>(); - for (int i = 0; i < strings.length; i += 2) { - String field = strings[i]; - map.put(field, strings[i + 1]); - } + static Map asOrderedMap(K k1, V v1, K k2, V v2) { + var map = new LinkedHashMap(); + map.put(k1, v1); + map.put(k2, v2); return map; } } From 4376bdb2f1660e6467835eb3728516897cb95dfc Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 5 Feb 2024 18:08:08 +0100 Subject: [PATCH 028/106] Adjust skip version for tsdb bwc tests that rely on _id / _tsid (#105144) Yaml tests executed in mixed clusters need to skip clusters that run 8.12.x or earlier versions. The yaml tests assume hashing-based time series ids, but if a node in the test cluster is on 8.12.x or earlier, then it can happen that pre-hashing time series ids are used (depending on the version of the elected master node). Tsdb yaml tests that assert the _id or _tsid should be skipped if there are 8.12.x nodes in the mixed test cluster. Rolling upgrade or full upgrade tests are better for asserting the _id or _tsid in this case, because tests are set up prior to upgrade and pre-8.12.x logic can be asserted in a more controlled way.
Closes #105129 --- .../resources/rest-api-spec/test/tsdb/25_id_generation.yml | 2 +- .../resources/rest-api-spec/test/tsdb/30_snapshot.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml index 531879cb67d30..6ef03ba8ebcc4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/25_id_generation.yml @@ -1,7 +1,7 @@ --- setup: - skip: - version: " - 8.1.99,8.7.00 - 8.12.99" + version: "- 8.12.99" reason: _tsid hashing introduced in 8.13 - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml index ff925ce56a70c..9d27507d0e32b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml @@ -1,7 +1,7 @@ --- setup: - skip: - version: "8.7.00 - 8.12.99" + version: " - 8.12.99" reason: _tsid hashing introduced in 8.13 - do: From 43362d5de5feead1e925a8bb2cb532fb0eec3521 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Mon, 5 Feb 2024 12:56:13 -0500 Subject: [PATCH 029/106] Add new int8_flat and flat vector index types (#104872) This adds two new vector index types: - flat - int8_flat Both store vectors in a flat space, and search is a brute-force scan over all vectors in the index. For the regular `flat` index, this can be considered syntactic sugar that allows `knn` queries without having to index the vectors into an HNSW graph. For `int8_flat`, this allows float vectors to be stored in a flat manner while also being automatically quantized.
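For illustration, a rough Java sketch of what brute-force search over a flat index means: score every stored vector against the query and keep the top k. This is a simplification rather than the actual implementation in ES813FlatVectorFormat below, and the 1 / (1 + squared distance) scoring, which mirrors l2_norm similarity, is an assumption made here for concreteness.

import java.util.PriorityQueue;

public class FlatKnnSketch {
    // Brute-force kNN over flat-stored vectors: no graph to traverse, just a
    // linear scan that scores every vector and keeps the k best ordinals.
    static int[] search(float[][] vectors, float[] query, int k) {
        // Min-heap on score, so the weakest of the current top-k is evicted first.
        PriorityQueue<float[]> topK = new PriorityQueue<>((a, b) -> Float.compare(a[1], b[1]));
        for (int ord = 0; ord < vectors.length; ord++) {
            // Illustrative l2_norm-style scoring: 1 / (1 + squared distance).
            float score = 1f / (1f + squareDistance(vectors[ord], query));
            if (topK.size() < k) {
                topK.add(new float[] { ord, score });
            } else if (score > topK.peek()[1]) {
                topK.poll();
                topK.add(new float[] { ord, score });
            }
        }
        // Drain the min-heap from the back so the result is best-first.
        int[] result = new int[topK.size()];
        for (int i = result.length - 1; i >= 0; i--) {
            result[i] = (int) topK.poll()[0];
        }
        return result;
    }

    static float squareDistance(float[] a, float[] b) {
        float sum = 0f;
        for (int i = 0; i < a.length; i++) {
            float d = a[i] - b[i];
            sum += d * d;
        }
        return sum;
    }
}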
--- docs/changelog/104872.yaml | 5 + .../mapping/types/dense-vector.asciidoc | 20 +- .../search.vectors/42_knn_search_flat.yml | 274 +++++++++++++++++ .../42_knn_search_int8_flat.yml | 289 ++++++++++++++++++ server/src/main/java/module-info.java | 4 + .../codec/vectors/ES813FlatVectorFormat.java | 152 +++++++++ .../vectors/ES813Int8FlatVectorFormat.java | 158 ++++++++++ .../vectors/DenseVectorFieldMapper.java | 96 +++++- .../search/vectors/ESKnnByteVectorQuery.java | 19 ++ .../search/vectors/ESKnnFloatVectorQuery.java | 18 ++ .../org.apache.lucene.codecs.KnnVectorsFormat | 2 + .../vectors/ES813FlatVectorFormatTests.java | 31 ++ .../ES813Int8FlatVectorFormatTests.java | 31 ++ 13 files changed, 1093 insertions(+), 6 deletions(-) create mode 100644 docs/changelog/104872.yaml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_flat.yml create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int8_flat.yml create mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java create mode 100644 server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java create mode 100644 server/src/main/resources/META-INF/services/org.apache.lucene.codecs.KnnVectorsFormat create mode 100644 server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java diff --git a/docs/changelog/104872.yaml b/docs/changelog/104872.yaml new file mode 100644 index 0000000000000..ad70946be02ae --- /dev/null +++ b/docs/changelog/104872.yaml @@ -0,0 +1,5 @@ +pr: 104872 +summary: Add new int8_flat and flat vector index types +area: Vector Search +type: enhancement +issues: [] diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc index a2ab44a173a62..d600bc5566ace 100644 --- a/docs/reference/mapping/types/dense-vector.asciidoc +++ b/docs/reference/mapping/types/dense-vector.asciidoc @@ -238,21 +238,31 @@ expense of slower indexing speed. ==== `type`::: (Required, string) -The type of kNN algorithm to use. Can be either `hnsw` or `int8_hnsw`. - +The type of kNN algorithm to use. Can be any of: ++ +-- +* `hnsw` - The default storage type. This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] for scalable + approximate kNN search. This supports all `element_type` values. +* `int8_hnsw` - This utilizes the https://arxiv.org/abs/1603.09320[HNSW algorithm] in addition to automatic scalar +quantization for scalable approximate kNN search with `element_type` of `float`. This can reduce the memory footprint +by 4x at the cost of some accuracy. See <>. +* `flat` - This utilizes a brute-force search algorithm for exact kNN search. This supports all `element_type` values. +* `int8_flat` - This utilizes a brute-force search algorithm in addition to automatic scalar quantization. Only supports +`element_type` of `float`. +-- `m`::: (Optional, integer) The number of neighbors each node will be connected to in the HNSW graph. -Defaults to `16`. +Defaults to `16`. Only applicable to `hnsw` and `int8_hnsw` index types. `ef_construction`::: (Optional, integer) The number of candidates to track while assembling the list of nearest -neighbors for each new node. Defaults to `100`. +neighbors for each new node. Defaults to `100`.
Only applicable to `hnsw` and `int8_hnsw` index types. `confidence_interval`::: (Optional, float) -Only applicable to `int8_hnsw` index types. The confidence interval to use when quantizing the vectors, +Only applicable to `int8_hnsw` and `int8_flat` index types. The confidence interval to use when quantizing the vectors, can be any value between and including `0.90` and `1.0`. This value restricts the values used when calculating the quantization thresholds. For example, a value of `0.95` will only use the middle 95% of the values when calculating the quantization thresholds (e.g. the highest and lowest 2.5% of values will be ignored). diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_flat.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_flat.yml new file mode 100644 index 0000000000000..7da00a02d4285 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_flat.yml @@ -0,0 +1,274 @@ +setup: + - skip: + version: ' - 8.12.99' + reason: 'kNN flat index added in 8.13' + - do: + indices.create: + index: flat + body: + mappings: + properties: + name: + type: keyword + vector: + type: dense_vector + dims: 5 + index: true + similarity: l2_norm + index_options: + type: flat + another_vector: + type: dense_vector + dims: 5 + index: true + similarity: l2_norm + index_options: + type: flat + + - do: + index: + index: flat + id: "1" + body: + name: cow.jpg + vector: [230.0, 300.33, -34.8988, 15.555, -200.0] + another_vector: [130.0, 115.0, -1.02, 15.555, -100.0] + + - do: + index: + index: flat + id: "2" + body: + name: moose.jpg + vector: [-0.5, 100.0, -13, 14.8, -156.0] + another_vector: [-0.5, 50.0, -1, 1, 120] + + - do: + index: + index: flat + id: "3" + body: + name: rabbit.jpg + vector: [0.5, 111.3, -13.0, 14.8, -156.0] + another_vector: [-0.5, 11.0, 0, 12, 111.0] + + - do: + indices.refresh: {} + +--- +"kNN search only": + - do: + search: + index: flat + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + + - match: {hits.hits.1._id: "3"} + - match: {hits.hits.1.fields.name.0: "rabbit.jpg"} +--- +"kNN multi-field search only": + - do: + search: + index: flat + body: + fields: [ "name" ] + knn: + - {field: vector, query_vector: [-0.5, 90.0, -10, 14.8, -156.0], k: 2, num_candidates: 3} + - {field: another_vector, query_vector: [-0.5, 11.0, 0, 12, 111.0], k: 2, num_candidates: 3} + + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + + - match: {hits.hits.1._id: "2"} + - match: {hits.hits.1.fields.name.0: "moose.jpg"} +--- +"kNN search plus query": + - do: + search: + index: flat + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + query: + term: + name: cow.jpg + + - match: {hits.hits.0._id: "1"} + - match: {hits.hits.0.fields.name.0: "cow.jpg"} + + - match: {hits.hits.1._id: "2"} + - match: {hits.hits.1.fields.name.0: "moose.jpg"} + + - match: {hits.hits.2._id: "3"} + - match: {hits.hits.2.fields.name.0: "rabbit.jpg"} +--- +"kNN multi-field search with query": + - do: + search: + index: flat + body: + fields: [ "name" ] + knn: + - {field: vector, query_vector: [-0.5, 90.0, -10, 14.8, -156.0], k: 2, num_candidates: 3} + - {field: another_vector, query_vector: [-0.5, 11.0, 0, 12, 
111.0], k: 2, num_candidates: 3} + query: + term: + name: cow.jpg + + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + + - match: {hits.hits.1._id: "1"} + - match: {hits.hits.1.fields.name.0: "cow.jpg"} + + - match: {hits.hits.2._id: "2"} + - match: {hits.hits.2.fields.name.0: "moose.jpg"} +--- +"kNN search with filter": + - do: + search: + index: flat + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + filter: + term: + name: "rabbit.jpg" + + - match: {hits.total.value: 1} + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + + - do: + search: + index: flat + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + filter: + - term: + name: "rabbit.jpg" + - term: + _id: 2 + + - match: {hits.total.value: 0} + +--- +"KNN Vector similarity search only": + - do: + search: + index: flat + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + similarity: 11 + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + + - length: {hits.hits: 1} + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: "moose.jpg"} +--- +"Vector similarity with filter only": + - do: + search: + index: flat + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + similarity: 11 + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + filter: {"term": {"name": "moose.jpg"}} + + - length: {hits.hits: 1} + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + + - do: + search: + index: flat + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + similarity: 110 + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + filter: {"term": {"name": "cow.jpg"}} + + - length: {hits.hits: 0} +--- +"Cosine similarity with indexed vector": + - skip: + features: "headers" + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "cosineSimilarity(params.query_vector, 'vector')" + params: + query_vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - match: {hits.total: 3} + + - match: {hits.hits.0._id: "3"} + - gte: {hits.hits.0._score: 0.999} + - lte: {hits.hits.0._score: 1.001} + + - match: {hits.hits.1._id: "2"} + - gte: {hits.hits.1._score: 0.998} + - lte: {hits.hits.1._score: 1.0} + + - match: {hits.hits.2._id: "1"} + - gte: {hits.hits.2._score: 0.78} + - lte: {hits.hits.2._score: 0.791} +--- +"Test bad parameters": + - do: + catch: bad_request + indices.create: + index: bad_flat + body: + mappings: + properties: + vector: + type: dense_vector + dims: 5 + index: true + index_options: + type: flat + m: 42 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int8_flat.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int8_flat.yml new file mode 100644 index 0000000000000..81d49dad21a70 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/42_knn_search_int8_flat.yml @@ -0,0 +1,289 @@ +setup: + - skip: + version: ' - 8.12.99' + reason: 'kNN int8_flat index added in 8.13' + - do: + indices.create: + index: int8_flat + body: + mappings: + properties: + name: + type: keyword + vector: + type: dense_vector + dims: 5 + index: true + similarity: l2_norm + index_options: + type: 
int8_flat + another_vector: + type: dense_vector + dims: 5 + index: true + similarity: l2_norm + index_options: + type: int8_flat + + - do: + index: + index: int8_flat + id: "1" + body: + name: cow.jpg + vector: [230.0, 300.33, -34.8988, 15.555, -200.0] + another_vector: [130.0, 115.0, -1.02, 15.555, -100.0] + + - do: + index: + index: int8_flat + id: "2" + body: + name: moose.jpg + vector: [-0.5, 100.0, -13, 14.8, -156.0] + another_vector: [-0.5, 50.0, -1, 1, 120] + + - do: + index: + index: int8_flat + id: "3" + body: + name: rabbit.jpg + vector: [0.5, 111.3, -13.0, 14.8, -156.0] + another_vector: [-0.5, 11.0, 0, 12, 111.0] + + - do: + indices.refresh: {} + +--- +"kNN search only": + - do: + search: + index: int8_flat + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + + - match: {hits.hits.1._id: "3"} + - match: {hits.hits.1.fields.name.0: "rabbit.jpg"} +--- +"kNN multi-field search only": + - do: + search: + index: int8_flat + body: + fields: [ "name" ] + knn: + - {field: vector, query_vector: [-0.5, 90.0, -10, 14.8, -156.0], k: 2, num_candidates: 3} + - {field: another_vector, query_vector: [-0.5, 11.0, 0, 12, 111.0], k: 2, num_candidates: 3} + + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + + - match: {hits.hits.1._id: "2"} + - match: {hits.hits.1.fields.name.0: "moose.jpg"} +--- +"kNN search plus query": + - do: + search: + index: int8_flat + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + query: + term: + name: cow.jpg + + - match: {hits.hits.0._id: "1"} + - match: {hits.hits.0.fields.name.0: "cow.jpg"} + + - match: {hits.hits.1._id: "2"} + - match: {hits.hits.1.fields.name.0: "moose.jpg"} + + - match: {hits.hits.2._id: "3"} + - match: {hits.hits.2.fields.name.0: "rabbit.jpg"} +--- +"kNN multi-field search with query": + - do: + search: + index: int8_flat + body: + fields: [ "name" ] + knn: + - {field: vector, query_vector: [-0.5, 90.0, -10, 14.8, -156.0], k: 2, num_candidates: 3} + - {field: another_vector, query_vector: [-0.5, 11.0, 0, 12, 111.0], k: 2, num_candidates: 3} + query: + term: + name: cow.jpg + + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + + - match: {hits.hits.1._id: "1"} + - match: {hits.hits.1.fields.name.0: "cow.jpg"} + + - match: {hits.hits.2._id: "2"} + - match: {hits.hits.2.fields.name.0: "moose.jpg"} +--- +"kNN search with filter": + - do: + search: + index: int8_flat + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + filter: + term: + name: "rabbit.jpg" + + - match: {hits.total.value: 1} + - match: {hits.hits.0._id: "3"} + - match: {hits.hits.0.fields.name.0: "rabbit.jpg"} + + - do: + search: + index: int8_flat + body: + fields: [ "name" ] + knn: + field: vector + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + k: 2 + num_candidates: 3 + filter: + - term: + name: "rabbit.jpg" + - term: + _id: 2 + + - match: {hits.total.value: 0} + +--- +"KNN Vector similarity search only": + - do: + search: + index: int8_flat + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + similarity: 10.3 + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + + - length: {hits.hits: 1} + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: 
"moose.jpg"} +--- +"Vector similarity with filter only": + - do: + search: + index: int8_flat + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + similarity: 11 + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + filter: {"term": {"name": "moose.jpg"}} + + - length: {hits.hits: 1} + + - match: {hits.hits.0._id: "2"} + - match: {hits.hits.0.fields.name.0: "moose.jpg"} + + - do: + search: + index: int8_flat + body: + fields: [ "name" ] + knn: + num_candidates: 3 + k: 3 + field: vector + similarity: 110 + query_vector: [-0.5, 90.0, -10, 14.8, -156.0] + filter: {"term": {"name": "cow.jpg"}} + + - length: {hits.hits: 0} +--- +"Cosine similarity with indexed vector": + - skip: + features: "headers" + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "cosineSimilarity(params.query_vector, 'vector')" + params: + query_vector: [0.5, 111.3, -13.0, 14.8, -156.0] + + - match: {hits.total: 3} + + - match: {hits.hits.0._id: "3"} + - gte: {hits.hits.0._score: 0.999} + - lte: {hits.hits.0._score: 1.001} + + - match: {hits.hits.1._id: "2"} + - gte: {hits.hits.1._score: 0.998} + - lte: {hits.hits.1._score: 1.0} + + - match: {hits.hits.2._id: "1"} + - gte: {hits.hits.2._score: 0.78} + - lte: {hits.hits.2._score: 0.791} +--- +"Test bad parameters": + - do: + catch: bad_request + indices.create: + index: bad_int8_flat + body: + mappings: + properties: + vector: + type: dense_vector + dims: 5 + index: true + index_options: + type: int8_flat + m: 42 + + - do: + catch: bad_request + indices.create: + index: bad_int8_flat + body: + mappings: + properties: + vector: + type: dense_vector + dims: 5 + element_type: byte + index: true + index_options: + type: int8_flat diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 4bc5d95f06896..78086d28446b6 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -424,6 +424,10 @@ org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat, org.elasticsearch.index.codec.postings.ES812PostingsFormat; provides org.apache.lucene.codecs.DocValuesFormat with ES87TSDBDocValuesFormat; + provides org.apache.lucene.codecs.KnnVectorsFormat + with + org.elasticsearch.index.codec.vectors.ES813FlatVectorFormat, + org.elasticsearch.index.codec.vectors.ES813Int8FlatVectorFormat; exports org.elasticsearch.cluster.routing.allocation.shards to diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java new file mode 100644 index 0000000000000..1813601fc9477 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java @@ -0,0 +1,152 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.codecs.FlatVectorsFormat; +import org.apache.lucene.codecs.FlatVectorsReader; +import org.apache.lucene.codecs.FlatVectorsWriter; +import org.apache.lucene.codecs.KnnFieldVectorsWriter; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.KnnVectorsReader; +import org.apache.lucene.codecs.KnnVectorsWriter; +import org.apache.lucene.codecs.lucene99.Lucene99FlatVectorsFormat; +import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.MergeState; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.index.Sorter; +import org.apache.lucene.search.KnnCollector; +import org.apache.lucene.util.Bits; +import org.apache.lucene.util.hnsw.OrdinalTranslatedKnnCollector; +import org.apache.lucene.util.hnsw.RandomVectorScorer; + +import java.io.IOException; + +public class ES813FlatVectorFormat extends KnnVectorsFormat { + + static final String NAME = "ES813FlatVectorFormat"; + + private final FlatVectorsFormat format = new Lucene99FlatVectorsFormat(); + + /** + * Sole constructor + */ + public ES813FlatVectorFormat() { + super(NAME); + } + + @Override + public KnnVectorsWriter fieldsWriter(SegmentWriteState state) throws IOException { + return new ES813FlatVectorWriter(format.fieldsWriter(state)); + } + + @Override + public KnnVectorsReader fieldsReader(SegmentReadState state) throws IOException { + return new ES813FlatVectorReader(format.fieldsReader(state)); + } + + public static class ES813FlatVectorWriter extends KnnVectorsWriter { + + private final FlatVectorsWriter writer; + + public ES813FlatVectorWriter(FlatVectorsWriter writer) { + super(); + this.writer = writer; + } + + @Override + public KnnFieldVectorsWriter addField(FieldInfo fieldInfo) throws IOException { + return writer.addField(fieldInfo, null); + } + + @Override + public void flush(int maxDoc, Sorter.DocMap sortMap) throws IOException { + writer.flush(maxDoc, sortMap); + } + + @Override + public void finish() throws IOException { + writer.finish(); + } + + @Override + public void close() throws IOException { + writer.close(); + } + + @Override + public long ramBytesUsed() { + return writer.ramBytesUsed(); + } + + @Override + public void mergeOneField(FieldInfo fieldInfo, MergeState mergeState) throws IOException { + writer.mergeOneField(fieldInfo, mergeState); + } + } + + public static class ES813FlatVectorReader extends KnnVectorsReader { + + private final FlatVectorsReader reader; + + public ES813FlatVectorReader(FlatVectorsReader reader) { + super(); + this.reader = reader; + } + + @Override + public void checkIntegrity() throws IOException { + reader.checkIntegrity(); + } + + @Override + public FloatVectorValues getFloatVectorValues(String field) throws IOException { + return reader.getFloatVectorValues(field); + } + + @Override + public ByteVectorValues getByteVectorValues(String field) throws IOException { + return reader.getByteVectorValues(field); + } + + @Override + public void search(String field, float[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + collectAllMatchingDocs(knnCollector, acceptDocs, reader.getRandomVectorScorer(field, target)); + } + + private void collectAllMatchingDocs(KnnCollector knnCollector, Bits acceptDocs, RandomVectorScorer scorer) throws IOException { + 
OrdinalTranslatedKnnCollector collector = new OrdinalTranslatedKnnCollector(knnCollector, scorer::ordToDoc); + Bits acceptedOrds = scorer.getAcceptOrds(acceptDocs); + for (int i = 0; i < scorer.maxOrd(); i++) { + if (acceptedOrds == null || acceptedOrds.get(i)) { + collector.collect(i, scorer.score(i)); + collector.incVisitedCount(1); + } + } + assert collector.earlyTerminated() == false; + } + + @Override + public void search(String field, byte[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + collectAllMatchingDocs(knnCollector, acceptDocs, reader.getRandomVectorScorer(field, target)); + } + + @Override + public void close() throws IOException { + reader.close(); + } + + @Override + public long ramBytesUsed() { + return reader.ramBytesUsed(); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java new file mode 100644 index 0000000000000..5764f31d018c4 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java @@ -0,0 +1,158 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.codecs.FlatVectorsFormat; +import org.apache.lucene.codecs.FlatVectorsReader; +import org.apache.lucene.codecs.FlatVectorsWriter; +import org.apache.lucene.codecs.KnnFieldVectorsWriter; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.KnnVectorsReader; +import org.apache.lucene.codecs.KnnVectorsWriter; +import org.apache.lucene.codecs.lucene99.Lucene99ScalarQuantizedVectorsFormat; +import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.MergeState; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.index.Sorter; +import org.apache.lucene.search.KnnCollector; +import org.apache.lucene.util.Bits; +import org.apache.lucene.util.hnsw.OrdinalTranslatedKnnCollector; +import org.apache.lucene.util.hnsw.RandomVectorScorer; + +import java.io.IOException; + +public class ES813Int8FlatVectorFormat extends KnnVectorsFormat { + + static final String NAME = "ES813Int8FlatVectorFormat"; + + private final FlatVectorsFormat format; + + public ES813Int8FlatVectorFormat() { + this(null); + } + + /** + * Sole constructor + */ + public ES813Int8FlatVectorFormat(Float confidenceInterval) { + super(NAME); + this.format = new Lucene99ScalarQuantizedVectorsFormat(confidenceInterval); + } + + @Override + public KnnVectorsWriter fieldsWriter(SegmentWriteState state) throws IOException { + return new ES813FlatVectorWriter(format.fieldsWriter(state)); + } + + @Override + public KnnVectorsReader fieldsReader(SegmentReadState state) throws IOException { + return new ES813FlatVectorReader(format.fieldsReader(state)); + } + + public static class ES813FlatVectorWriter extends KnnVectorsWriter { + + private final FlatVectorsWriter writer; + + public ES813FlatVectorWriter(FlatVectorsWriter writer) { + super(); + 
this.writer = writer; + } + + @Override + public KnnFieldVectorsWriter addField(FieldInfo fieldInfo) throws IOException { + return writer.addField(fieldInfo, null); + } + + @Override + public void flush(int maxDoc, Sorter.DocMap sortMap) throws IOException { + writer.flush(maxDoc, sortMap); + } + + @Override + public void finish() throws IOException { + writer.finish(); + } + + @Override + public void close() throws IOException { + writer.close(); + } + + @Override + public long ramBytesUsed() { + return writer.ramBytesUsed(); + } + + @Override + public void mergeOneField(FieldInfo fieldInfo, MergeState mergeState) throws IOException { + writer.mergeOneField(fieldInfo, mergeState); + } + } + + public static class ES813FlatVectorReader extends KnnVectorsReader { + + private final FlatVectorsReader reader; + + public ES813FlatVectorReader(FlatVectorsReader reader) { + super(); + this.reader = reader; + } + + @Override + public void checkIntegrity() throws IOException { + reader.checkIntegrity(); + } + + @Override + public FloatVectorValues getFloatVectorValues(String field) throws IOException { + return reader.getFloatVectorValues(field); + } + + @Override + public ByteVectorValues getByteVectorValues(String field) throws IOException { + return reader.getByteVectorValues(field); + } + + @Override + public void search(String field, float[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + collectAllMatchingDocs(knnCollector, acceptDocs, reader.getRandomVectorScorer(field, target)); + } + + private void collectAllMatchingDocs(KnnCollector knnCollector, Bits acceptDocs, RandomVectorScorer scorer) throws IOException { + OrdinalTranslatedKnnCollector collector = new OrdinalTranslatedKnnCollector(knnCollector, scorer::ordToDoc); + Bits acceptedOrds = scorer.getAcceptOrds(acceptDocs); + for (int i = 0; i < scorer.maxOrd(); i++) { + if (acceptedOrds == null || acceptedOrds.get(i)) { + collector.collect(i, scorer.score(i)); + collector.incVisitedCount(1); + } + } + assert collector.earlyTerminated() == false; + } + + @Override + public void search(String field, byte[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + collectAllMatchingDocs(knnCollector, acceptDocs, reader.getRandomVectorScorer(field, target)); + } + + @Override + public void close() throws IOException { + reader.close(); + } + + @Override + public long ramBytesUsed() { + return reader.ramBytesUsed(); + } + + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index a9a31ba585177..d36ca9e0b25c1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -46,6 +46,8 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.index.codec.vectors.ES813FlatVectorFormat; +import org.elasticsearch.index.codec.vectors.ES813Int8FlatVectorFormat; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.ArraySourceValueFetcher; @@ -842,6 +844,25 @@ public IndexOptions parseIndexOptions(String fieldName, Map indexOpti MappingParser.checkNoRemainingFields(fieldName, indexOptionsMap); return new 
Int8HnswIndexOptions(m, efConstruction, confidenceInterval); } + }, + FLAT("flat") { + @Override + public IndexOptions parseIndexOptions(String fieldName, Map indexOptionsMap) { + MappingParser.checkNoRemainingFields(fieldName, indexOptionsMap); + return new FlatIndexOptions(); + } + }, + INT8_FLAT("int8_flat") { + @Override + public IndexOptions parseIndexOptions(String fieldName, Map indexOptionsMap) { + Object confidenceIntervalNode = indexOptionsMap.remove("confidence_interval"); + Float confidenceInterval = null; + if (confidenceIntervalNode != null) { + confidenceInterval = (float) XContentMapValues.nodeDoubleValue(confidenceIntervalNode); + } + MappingParser.checkNoRemainingFields(fieldName, indexOptionsMap); + return new Int8FlatIndexOption(confidenceInterval); + } }; static Optional fromString(String type) { @@ -857,6 +878,80 @@ static Optional fromString(String type) { abstract IndexOptions parseIndexOptions(String fieldName, Map indexOptionsMap); } + private static class Int8FlatIndexOption extends IndexOptions { + private final Float confidenceInterval; + + Int8FlatIndexOption(Float confidenceInterval) { + super("int8_flat"); + this.confidenceInterval = confidenceInterval; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("type", type); + if (confidenceInterval != null) { + builder.field("confidence_interval", confidenceInterval); + } + builder.endObject(); + return builder; + } + + @Override + KnnVectorsFormat getVectorsFormat() { + return new ES813Int8FlatVectorFormat(confidenceInterval); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Int8FlatIndexOption that = (Int8FlatIndexOption) o; + return Objects.equals(confidenceInterval, that.confidenceInterval); + } + + @Override + public int hashCode() { + return Objects.hash(confidenceInterval); + } + + @Override + boolean supportsElementType(ElementType elementType) { + return elementType != ElementType.BYTE; + } + } + + private static class FlatIndexOptions extends IndexOptions { + + FlatIndexOptions() { + super("flat"); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("type", type); + builder.endObject(); + return builder; + } + + @Override + KnnVectorsFormat getVectorsFormat() { + return new ES813FlatVectorFormat(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + return o != null && getClass() == o.getClass(); + } + + @Override + public int hashCode() { + return Objects.hash(type); + } + } + private static class Int8HnswIndexOptions extends IndexOptions { private final int m; private final int efConstruction; @@ -1186,7 +1281,6 @@ && isNotUnitVector(squaredMagnitude)) { case FLOAT -> parentFilter != null ? 
new ESDiversifyingChildrenFloatKnnVectorQuery(name(), queryVector, filter, numCands, parentFilter) : new ESKnnFloatVectorQuery(name(), queryVector, numCands, filter); - }; if (similarityThreshold != null) { diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java index 347bca245d144..091ce6f8a0f6d 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java @@ -8,16 +8,35 @@ package org.elasticsearch.search.vectors; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.KnnByteVectorQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopDocsCollector; +import org.apache.lucene.util.Bits; import org.elasticsearch.search.profile.query.QueryProfiler; +import java.io.IOException; + public class ESKnnByteVectorQuery extends KnnByteVectorQuery implements ProfilingQuery { + private static final TopDocs NO_RESULTS = TopDocsCollector.EMPTY_TOPDOCS; + private long vectorOpsCount; + private final byte[] target; public ESKnnByteVectorQuery(String field, byte[] target, int k, Query filter) { super(field, target, k, filter); + this.target = target; + } + + @Override + protected TopDocs approximateSearch(LeafReaderContext context, Bits acceptDocs, int visitedLimit) throws IOException { + // We increment visit limit by one to bypass a fencepost error in the collector + if (visitedLimit < Integer.MAX_VALUE) { + visitedLimit += 1; + } + TopDocs results = context.reader().searchNearestVectors(field, target, k, acceptDocs, visitedLimit); + return results != null ? results : NO_RESULTS; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java index e83a90a3c4df8..4fa4db1f4ea95 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java @@ -8,16 +8,24 @@ package org.elasticsearch.search.vectors; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.KnnFloatVectorQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TopDocsCollector; +import org.apache.lucene.util.Bits; import org.elasticsearch.search.profile.query.QueryProfiler; +import java.io.IOException; + public class ESKnnFloatVectorQuery extends KnnFloatVectorQuery implements ProfilingQuery { + private static final TopDocs NO_RESULTS = TopDocsCollector.EMPTY_TOPDOCS; private long vectorOpsCount; + private final float[] target; public ESKnnFloatVectorQuery(String field, float[] target, int k, Query filter) { super(field, target, k, filter); + this.target = target; } @Override @@ -27,6 +35,16 @@ protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { return topK; } + @Override + protected TopDocs approximateSearch(LeafReaderContext context, Bits acceptDocs, int visitedLimit) throws IOException { + // We increment visit limit by one to bypass a fencepost error in the collector + if (visitedLimit < Integer.MAX_VALUE) { + visitedLimit += 1; + } + TopDocs results = context.reader().searchNearestVectors(field, target, k, acceptDocs, visitedLimit); + return results != null ? 
results : NO_RESULTS; + } + @Override public void profile(QueryProfiler queryProfiler) { queryProfiler.setVectorOpsCount(vectorOpsCount); diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.KnnVectorsFormat b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.KnnVectorsFormat new file mode 100644 index 0000000000000..ff848275f2ba1 --- /dev/null +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.KnnVectorsFormat @@ -0,0 +1,2 @@ +org.elasticsearch.index.codec.vectors.ES813FlatVectorFormat +org.elasticsearch.index.codec.vectors.ES813Int8FlatVectorFormat diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java new file mode 100644 index 0000000000000..2f9148e80988e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; + +public class ES813FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase { + @Override + protected Codec getCodec() { + return new Lucene99Codec() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return new ES813FlatVectorFormat(); + } + }; + } + + public void testSearchWithVisitedLimit() { + assumeTrue("requires graph based vector codec", false); + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java new file mode 100644 index 0000000000000..07a922efd21a6 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.index.codec.vectors; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; + +public class ES813Int8FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase { + @Override + protected Codec getCodec() { + return new Lucene99Codec() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return new ES813Int8FlatVectorFormat(); + } + }; + } + + public void testSearchWithVisitedLimit() { + assumeTrue("requires graph based vector codec", false); + } + +} From fabcf708836d06804cc00db37b7ec787d086e723 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 5 Feb 2024 13:54:43 -0500 Subject: [PATCH 030/106] Switching evictor tests to use a deterministic queue (#105151) --- .../http/IdleConnectionEvictorTests.java | 36 ++++++++++--------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java index f29120d9026a5..3c263adaddc46 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java @@ -10,17 +10,16 @@ import org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager; import org.apache.http.impl.nio.reactor.DefaultConnectingIOReactor; import org.apache.http.nio.reactor.IOReactorException; +import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; -import org.junit.After; import org.junit.Before; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Mockito.doAnswer; @@ -32,16 +31,11 @@ public class IdleConnectionEvictorTests extends ESTestCase { private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); - private ThreadPool threadPool; + private DeterministicTaskQueue taskQueue; @Before public void init() { - threadPool = createThreadPool(inferenceUtilityPool()); - } - - @After - public void shutdown() { - terminate(threadPool); + taskQueue = new DeterministicTaskQueue(); } public void testStart_CallsExecutorSubmit() throws IOReactorException { @@ -87,7 +81,7 @@ public void testCloseExpiredConnections_IsCalled() throws InterruptedException { var manager = mock(PoolingNHttpClientConnectionManager.class); var evictor = new IdleConnectionEvictor( - threadPool, + taskQueue.getThreadPool(), manager, new TimeValue(1, TimeUnit.NANOSECONDS), new TimeValue(1, TimeUnit.NANOSECONDS) @@ -100,7 +94,8 @@ public void testCloseExpiredConnections_IsCalled() throws InterruptedException { return Void.TYPE; }).when(manager).closeExpiredConnections(); - evictor.start(); + startEvictor(evictor); + runLatch.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); 
verify(manager, times(1)).closeExpiredConnections(); @@ -110,7 +105,7 @@ public void testCloseIdleConnections_IsCalled() throws InterruptedException { var manager = mock(PoolingNHttpClientConnectionManager.class); var evictor = new IdleConnectionEvictor( - threadPool, + taskQueue.getThreadPool(), manager, new TimeValue(1, TimeUnit.NANOSECONDS), new TimeValue(1, TimeUnit.NANOSECONDS) ); @@ -123,7 +118,8 @@ public void testCloseIdleConnections_IsCalled() throws InterruptedException { return Void.TYPE; }).when(manager).closeIdleConnections(anyLong(), any()); - evictor.start(); + startEvictor(evictor); + runLatch.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); verify(manager, times(1)).closeIdleConnections(anyLong(), any()); @@ -131,32 +127,38 @@ public void testIsRunning_ReturnsTrue() throws IOReactorException { var evictor = new IdleConnectionEvictor( - threadPool, + taskQueue.getThreadPool(), createConnectionManager(), new TimeValue(1, TimeUnit.SECONDS), new TimeValue(1, TimeUnit.SECONDS) ); - evictor.start(); + startEvictor(evictor); + assertTrue(evictor.isRunning()); evictor.close(); } public void testIsRunning_ReturnsFalse() throws IOReactorException { var evictor = new IdleConnectionEvictor( - threadPool, + taskQueue.getThreadPool(), createConnectionManager(), new TimeValue(1, TimeUnit.SECONDS), new TimeValue(1, TimeUnit.SECONDS) ); - evictor.start(); + startEvictor(evictor); assertTrue(evictor.isRunning()); evictor.close(); assertFalse(evictor.isRunning()); } + private void startEvictor(IdleConnectionEvictor evictor) { + taskQueue.scheduleNow(evictor::start); + taskQueue.runAllRunnableTasks(); + } + private static PoolingNHttpClientConnectionManager createConnectionManager() throws IOReactorException { return new PoolingNHttpClientConnectionManager(new DefaultConnectingIOReactor()); } From f8795088349b8dfefbb948f6c250bd28a3e1c4c7 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 5 Feb 2024 20:35:15 +0100 Subject: [PATCH 031/106] Avoid building large CompositeByteBuf when sending transport messages (#105137) We can avoid building composite byte buf instances on the transport layer (they have quite a bit of overhead and make heap dumps more complicated to read). There's no need to add another round of references to the BytesReference components here; just write these out as they come in. This would allow for some efficiency-improving follow-ups where we can essentially release the pages that have passed the write pipeline. To avoid having this explode the size of the per-channel write queue, I moved that queue to a linked list. The slowdown from a linked list should be irrelevant: the queue is mostly empty, and when it is not, operations other than dequeuing matter far more to the performance of this logic (Netty internally uses a linked list further down the line anyway). I would regard this as step 1 in making the serialisation here lazier, as on the REST layer, to avoid copying bytes into the outbound buffer that we already have as `byte[]`.
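To make the per-chunk write pattern this change moves to concrete, here is a minimal standalone Java sketch. It is not part of the patch: the class name, the toChunks helper, and the main method are illustrative assumptions, while BytesReference, CompositeBytesReference, BytesRefIterator, and Unpooled are the same APIs used in the diff below. The idea is to iterate the pages of a possibly-composite BytesReference and wrap each page in its own ByteBuf rather than assembling a single CompositeByteBuf over all of them.

    import io.netty.buffer.ByteBuf;
    import io.netty.buffer.Unpooled;
    import org.apache.lucene.util.BytesRef;
    import org.apache.lucene.util.BytesRefIterator;
    import org.elasticsearch.common.bytes.BytesArray;
    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.bytes.CompositeBytesReference;

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical sketch class, not part of this patch.
    public class PerChunkWriteSketch {

        // Wrap each page of the reference in its own ByteBuf instead of building one
        // CompositeByteBuf over all pages. Unpooled.wrappedBuffer shares the backing
        // byte[], so nothing is copied and each chunk can be written (and eventually
        // released) independently.
        static List<ByteBuf> toChunks(BytesReference reference) throws IOException {
            final List<ByteBuf> chunks = new ArrayList<>();
            final BytesRefIterator iterator = reference.iterator();
            BytesRef next;
            while ((next = iterator.next()) != null) {
                chunks.add(Unpooled.wrappedBuffer(next.bytes, next.offset, next.length));
            }
            return chunks;
        }

        public static void main(String[] args) throws IOException {
            final BytesReference composite = CompositeBytesReference.of(new BytesArray("header"), new BytesArray("message"));
            System.out.println(toChunks(composite).size()); // prints 2: one ByteBuf per component
        }
    }

This mirrors the loop added to Netty4WriteThrottlingHandler#write below, where each chunk additionally gets its own ChannelPromise that a PromiseCombiner ties back to the caller's promise.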
--- .../transport/netty4/Netty4TcpChannel.java | 2 +- .../transport/netty4/Netty4Utils.java | 8 +- .../netty4/Netty4WriteThrottlingHandler.java | 37 ++++++-- .../Netty4WriteThrottlingHandlerTests.java | 90 ++++++++++++++++--- 4 files changed, 115 insertions(+), 22 deletions(-) diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java index ad205c6f28783..33fdb00e7abb2 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java @@ -165,7 +165,7 @@ public void sendMessage(BytesReference reference, ActionListener listener) // We need to both guard against double resolving the listener and not resolving it in case of event loop shutdown so we need to // use #notifyOnce here until https://github.com/netty/netty/issues/8007 is resolved. var wrapped = ActionListener.notifyOnce(listener); - channel.writeAndFlush(Netty4Utils.toByteBuf(reference), addPromise(wrapped, channel)); + channel.writeAndFlush(reference, addPromise(wrapped, channel)); if (channel.eventLoop().isShutdown()) { wrapped.onFailure(new TransportException("Cannot send message, event loop is shutting down.")); } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java index 2dae8bc9258fe..b9986dbf00d87 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java @@ -72,9 +72,13 @@ public static void setAvailableProcessors(final int availableProcessors) { * pages of the BytesReference. Don't free the bytes of reference before the ByteBuf goes out of scope. 
*/ public static ByteBuf toByteBuf(final BytesReference reference) { - if (reference.length() == 0) { - return Unpooled.EMPTY_BUFFER; + if (reference.hasArray()) { + return Unpooled.wrappedBuffer(reference.array(), reference.arrayOffset(), reference.length()); } + return compositeReferenceToByteBuf(reference); + } + + private static ByteBuf compositeReferenceToByteBuf(BytesReference reference) { final BytesRefIterator iterator = reference.iterator(); // usually we have one, two, or three components from the header, the message, and a buffer final List<ByteBuf> buffers = new ArrayList<>(3); diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4WriteThrottlingHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4WriteThrottlingHandler.java index 3246c52e08bd0..ced2d7d65fa16 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4WriteThrottlingHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4WriteThrottlingHandler.java @@ -9,6 +9,7 @@ package org.elasticsearch.transport.netty4; import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; import io.netty.channel.Channel; import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelFuture; @@ -16,12 +17,17 @@ import io.netty.channel.ChannelPromise; import io.netty.util.concurrent.Future; import io.netty.util.concurrent.GenericFutureListener; +import io.netty.util.concurrent.PromiseCombiner; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefIterator; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.transport.Transports; +import java.io.IOException; import java.nio.channels.ClosedChannelException; -import java.util.ArrayDeque; +import java.util.LinkedList; import java.util.Queue; /** @@ -32,7 +38,7 @@ public final class Netty4WriteThrottlingHandler extends ChannelDuplexHandler { public static final int MAX_BYTES_PER_WRITE = 1 << 18; - private final Queue<WriteOperation> queuedWrites = new ArrayDeque<>(); + private final Queue<WriteOperation> queuedWrites = new LinkedList<>(); private final ThreadContext threadContext; private WriteOperation currentWrite; @@ -42,17 +48,36 @@ public Netty4WriteThrottlingHandler(ThreadContext threadContext) { } @Override - public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) { - assert msg instanceof ByteBuf; + public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws IOException { + if (msg instanceof BytesReference reference) { + if (reference.hasArray()) { + writeSingleByteBuf(ctx, Unpooled.wrappedBuffer(reference.array(), reference.arrayOffset(), reference.length()), promise); + } else { + BytesRefIterator iter = reference.iterator(); + final PromiseCombiner combiner = new PromiseCombiner(ctx.executor()); + BytesRef next; + while ((next = iter.next()) != null) { + final ChannelPromise chunkPromise = ctx.newPromise(); + combiner.add((Future<Void>) chunkPromise); + writeSingleByteBuf(ctx, Unpooled.wrappedBuffer(next.bytes, next.offset, next.length), chunkPromise); + } + combiner.finish(promise); + } + } else { + assert msg instanceof ByteBuf; + writeSingleByteBuf(ctx, (ByteBuf) msg, promise); + } + } + + private void writeSingleByteBuf(ChannelHandlerContext ctx, ByteBuf buf, ChannelPromise promise) { assert Transports.assertDefaultThreadContext(threadContext); assert
Transports.assertTransportThread(); - final ByteBuf buf = (ByteBuf) msg; if (ctx.channel().isWritable() && currentWrite == null && queuedWrites.isEmpty()) { // nothing is queued for writing and the channel is writable, just pass the write down the pipeline directly if (buf.readableBytes() > MAX_BYTES_PER_WRITE) { writeInSlices(ctx, promise, buf); } else { - ctx.write(msg, promise); + ctx.write(buf, promise); } } else { queueWrite(buf, promise); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4WriteThrottlingHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4WriteThrottlingHandlerTests.java index 59828649c58fb..8ac39d925f566 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4WriteThrottlingHandlerTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4WriteThrottlingHandlerTests.java @@ -14,6 +14,9 @@ import io.netty.channel.ChannelPromise; import io.netty.channel.embedded.EmbeddedChannel; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.CompositeBytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; @@ -28,6 +31,7 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.oneOf; public class Netty4WriteThrottlingHandlerTests extends ESTestCase { @@ -56,42 +60,76 @@ public void testThrottlesLargeMessage() throws ExecutionException, InterruptedEx assertThat(writeableBytes, lessThan(Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE)); final int fullSizeChunks = randomIntBetween(2, 10); final int extraChunkSize = randomIntBetween(0, 10); - final ByteBuf message = Unpooled.wrappedBuffer( - randomByteArrayOfLength(Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE * fullSizeChunks + extraChunkSize) + final byte[] messageBytes = randomByteArrayOfLength( + Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE * fullSizeChunks + extraChunkSize ); + final Object message = wrapAsNettyOrEsBuffer(messageBytes); final ChannelPromise promise = embeddedChannel.newPromise(); transportGroup.getLowLevelGroup().submit(() -> embeddedChannel.write(message, promise)).get(); assertThat(seen, hasSize(1)); - assertEquals(message.slice(0, Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE), seen.get(0)); + assertSliceEquals(seen.get(0), message, 0, Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE); assertFalse(promise.isDone()); transportGroup.getLowLevelGroup().submit(embeddedChannel::flush).get(); assertTrue(promise.isDone()); assertThat(seen, hasSize(fullSizeChunks + (extraChunkSize == 0 ? 
0 : 1))); assertTrue(capturingHandler.didWriteAfterThrottled); if (extraChunkSize != 0) { - assertEquals( - message.slice(Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE * fullSizeChunks, extraChunkSize), - seen.get(seen.size() - 1) + assertSliceEquals( + seen.get(seen.size() - 1), + message, + Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE * fullSizeChunks, + extraChunkSize ); } } - public void testPassesSmallMessageDirectly() throws ExecutionException, InterruptedException { + public void testThrottleLargeCompositeMessage() throws ExecutionException, InterruptedException { final List seen = new CopyOnWriteArrayList<>(); final CapturingHandler capturingHandler = new CapturingHandler(seen); final EmbeddedChannel embeddedChannel = new EmbeddedChannel( capturingHandler, new Netty4WriteThrottlingHandler(new ThreadContext(Settings.EMPTY)) ); + // we assume that the channel outbound buffer is smaller than Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE final int writeableBytes = Math.toIntExact(embeddedChannel.bytesBeforeUnwritable()); assertThat(writeableBytes, lessThan(Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE)); - final ByteBuf message = Unpooled.wrappedBuffer( - randomByteArrayOfLength(randomIntBetween(0, Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE)) + final int fullSizeChunks = randomIntBetween(2, 10); + final int extraChunkSize = randomIntBetween(0, 10); + final byte[] messageBytes = randomByteArrayOfLength( + Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE * fullSizeChunks + extraChunkSize + ); + int splitOffset = randomIntBetween(0, messageBytes.length); + final BytesReference message = CompositeBytesReference.of( + new BytesArray(messageBytes, 0, splitOffset), + new BytesArray(messageBytes, splitOffset, messageBytes.length - splitOffset) + ); + final ChannelPromise promise = embeddedChannel.newPromise(); + transportGroup.getLowLevelGroup().submit(() -> embeddedChannel.write(message, promise)).get(); + assertThat(seen, hasSize(oneOf(1, 2))); + assertSliceEquals(seen.get(0), message, 0, seen.get(0).readableBytes()); + assertFalse(promise.isDone()); + transportGroup.getLowLevelGroup().submit(embeddedChannel::flush).get(); + assertTrue(promise.isDone()); + assertThat(seen, hasSize(oneOf(fullSizeChunks, fullSizeChunks + 1))); + assertTrue(capturingHandler.didWriteAfterThrottled); + assertBufferEquals(Unpooled.compositeBuffer().addComponents(true, seen), message); + } + + public void testPassesSmallMessageDirectly() throws ExecutionException, InterruptedException { + final List seen = new CopyOnWriteArrayList<>(); + final CapturingHandler capturingHandler = new CapturingHandler(seen); + final EmbeddedChannel embeddedChannel = new EmbeddedChannel( + capturingHandler, + new Netty4WriteThrottlingHandler(new ThreadContext(Settings.EMPTY)) ); + final int writeableBytes = Math.toIntExact(embeddedChannel.bytesBeforeUnwritable()); + assertThat(writeableBytes, lessThan(Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE)); + final byte[] messageBytes = randomByteArrayOfLength(randomIntBetween(0, Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE)); + final Object message = wrapAsNettyOrEsBuffer(messageBytes); final ChannelPromise promise = embeddedChannel.newPromise(); transportGroup.getLowLevelGroup().submit(() -> embeddedChannel.write(message, promise)).get(); assertThat(seen, hasSize(1)); // first message should be passed through straight away - assertSame(message, seen.get(0)); + assertBufferEquals(seen.get(0), message); assertFalse(promise.isDone()); 
transportGroup.getLowLevelGroup().submit(embeddedChannel::flush).get(); assertTrue(promise.isDone()); @@ -107,13 +145,14 @@ public void testThrottlesOnUnwritable() throws ExecutionException, InterruptedEx ); final int writeableBytes = Math.toIntExact(embeddedChannel.bytesBeforeUnwritable()); assertThat(writeableBytes, lessThan(Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE)); - final ByteBuf message = Unpooled.wrappedBuffer(randomByteArrayOfLength(writeableBytes + randomIntBetween(0, 10))); + final byte[] messageBytes = randomByteArrayOfLength(writeableBytes + randomIntBetween(0, 10)); + final Object message = wrapAsNettyOrEsBuffer(messageBytes); final ChannelPromise promise = embeddedChannel.newPromise(); transportGroup.getLowLevelGroup().submit(() -> embeddedChannel.write(message, promise)).get(); assertThat(seen, hasSize(1)); // first message should be passed through straight away - assertSame(message, seen.get(0)); + assertBufferEquals(seen.get(0), message); assertFalse(promise.isDone()); - final ByteBuf messageToQueue = Unpooled.wrappedBuffer( + final Object messageToQueue = wrapAsNettyOrEsBuffer( randomByteArrayOfLength(randomIntBetween(0, Netty4WriteThrottlingHandler.MAX_BYTES_PER_WRITE)) ); final ChannelPromise promiseForQueued = embeddedChannel.newPromise(); @@ -126,6 +165,31 @@ public void testThrottlesOnUnwritable() throws ExecutionException, InterruptedEx assertTrue(promiseForQueued.isDone()); } + private static void assertBufferEquals(ByteBuf expected, Object message) { + if (message instanceof ByteBuf buf) { + assertSame(expected, buf); + } else { + assertEquals(expected, Netty4Utils.toByteBuf(asInstanceOf(BytesReference.class, message))); + } + } + + private static void assertSliceEquals(ByteBuf expected, Object message, int index, int length) { + assertEquals( + (message instanceof ByteBuf buf ? buf : Netty4Utils.toByteBuf(asInstanceOf(BytesReference.class, message))).slice( + index, + length + ), + expected + ); + } + + private static Object wrapAsNettyOrEsBuffer(byte[] messageBytes) { + if (randomBoolean()) { + return Unpooled.wrappedBuffer(messageBytes); + } + return new BytesArray(messageBytes); + } + private static class CapturingHandler extends ChannelOutboundHandlerAdapter { private final List seen; From 9d3a645d59117c8c52da0c8c825e3419571704b2 Mon Sep 17 00:00:00 2001 From: James Baiera Date: Mon, 5 Feb 2024 14:37:30 -0500 Subject: [PATCH 032/106] Redirect failed ingest node operations to a failure store when available (#103481) This PR updates the ingest service to detect if a failed ingest document was bound for a data stream configured with a failure store, and in that event, restores the document to its original state, transforms it with its failure information, and redirects it to the failure store for the data stream it was originally targeting. 
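Before the diff, a rough standalone sketch of the redirect decision this commit introduces (implemented in TransportBulkAction#shouldStoreFailure below): the failure-store flag of an existing data stream is consulted first, then the matching v2 index template, and if neither resolves the item fails as before. The class, the two Optional parameters, and the main method are hypothetical stand-ins for the cluster-state lookups in the real code; only the combinator shape is taken from the patch.

    import java.util.Optional;

    // Hypothetical sketch; the Optionals stand in for the patch's
    // resolveFailureStoreFromMetadata and resolveFailureStoreFromTemplate lookups.
    public class FailureStoreResolutionSketch {

        static boolean shouldStoreFailure(
            Optional<Boolean> fromDataStream,  // failure-store flag of an existing data stream, if any
            Optional<Boolean> fromTemplate,    // failure-store flag of the matching v2 template, if any
            boolean featureEnabled             // DataStream.isFailureStoreEnabled() in the real code
        ) {
            // Data stream wins, template is the fallback, default is plain failure handling.
            return featureEnabled && fromDataStream.or(() -> fromTemplate).orElse(false);
        }

        public static void main(String[] args) {
            // Existing data stream with a failure store: redirect the failed document.
            System.out.println(shouldStoreFailure(Optional.of(true), Optional.empty(), true));  // true
            // No data stream yet, but the matching template enables one: redirect.
            System.out.println(shouldStoreFailure(Optional.empty(), Optional.of(true), true));  // true
            // Neither resolves: mark the item failed as before.
            System.out.println(shouldStoreFailure(Optional.empty(), Optional.empty(), true));   // false
        }
    }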
--- docs/changelog/103481.yaml | 5 + .../190_failure_store_redirection.yml | 110 ++++++ .../action/bulk/BulkRequestModifier.java | 103 +++++ .../action/bulk/FailureStoreDocument.java | 111 ++++++ .../action/bulk/TransportBulkAction.java | 84 ++++- .../action/index/IndexRequest.java | 36 +- .../elasticsearch/ingest/IngestService.java | 115 ++++-- .../bulk/FailureStoreDocumentTests.java | 69 ++++ .../bulk/TransportBulkActionIngestTests.java | 52 ++- .../action/bulk/TransportBulkActionTests.java | 98 +++++ .../ingest/IngestServiceTests.java | 357 +++++++++++++++++- .../metadata/DataStreamTestHelper.java | 34 +- 12 files changed, 1120 insertions(+), 54 deletions(-) create mode 100644 docs/changelog/103481.yaml create mode 100644 modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml create mode 100644 server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocument.java create mode 100644 server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentTests.java diff --git a/docs/changelog/103481.yaml b/docs/changelog/103481.yaml new file mode 100644 index 0000000000000..f7c7c0b6eecc9 --- /dev/null +++ b/docs/changelog/103481.yaml @@ -0,0 +1,5 @@ +pr: 103481 +summary: Redirect failed ingest node operations to a failure store when available +area: Data streams +type: feature +issues: [] diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml new file mode 100644 index 0000000000000..b9621977ff3aa --- /dev/null +++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/190_failure_store_redirection.yml @@ -0,0 +1,110 @@ +--- +teardown: + - do: + indices.delete_data_stream: + name: logs-foobar + ignore: 404 + + - do: + indices.delete: + index: .fs-logs-foobar-* + ignore: 404 + + - do: + indices.delete_index_template: + name: generic_logs_template + ignore: 404 + + - do: + ingest.delete_pipeline: + id: "failing_pipeline" + ignore: 404 + +--- +"Redirect ingest failure in data stream to failure store": + - skip: + version: " - 8.12.99" + reason: "data stream failure stores only redirect ingest failures in 8.13+" + features: [allowed_warnings, contains] + + - do: + ingest.put_pipeline: + id: "failing_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "fail" : { + "message" : "error_message" + } + } + ] + } + - match: { acknowledged: true } + + - do: + allowed_warnings: + - "index template [generic_logs_template] has index patterns [logs-*] matching patterns from existing older templates [global] with patterns (global => [*]); this template [generic_logs_template] will take precedence during new index creation" + indices.put_index_template: + name: generic_logs_template + body: + index_patterns: logs-* + data_stream: + failure_store: true + template: + settings: + number_of_shards: 1 + number_of_replicas: 1 + index: + default_pipeline: "failing_pipeline" + + - do: + index: + index: logs-foobar + refresh: true + body: + '@timestamp': '2020-12-12' + foo: bar + + - do: + indices.get_data_stream: + name: logs-foobar + - match: { data_streams.0.name: logs-foobar } + - match: { data_streams.0.timestamp_field.name: '@timestamp' } + - length: { data_streams.0.indices: 1 } + - match: { data_streams.0.indices.0.index_name: '/\.ds-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + - match: { 
data_streams.0.failure_store: true } + - length: { data_streams.0.failure_indices: 1 } + - match: { data_streams.0.failure_indices.0.index_name: '/\.fs-logs-foobar-(\d{4}\.\d{2}\.\d{2}-)?000001/' } + + - do: + search: + index: logs-foobar + body: { query: { match_all: {} } } + - length: { hits.hits: 0 } + + - do: + search: + index: .fs-logs-foobar-* + - length: { hits.hits: 1 } + - match: { hits.hits.0._index: "/\\.fs-logs-foobar-(\\d{4}\\.\\d{2}\\.\\d{2}-)?000001/" } + - exists: hits.hits.0._source.@timestamp + - not_exists: hits.hits.0._source.foo + - not_exists: hits.hits.0._source.document.id + - match: { hits.hits.0._source.document.index: 'logs-foobar' } + - match: { hits.hits.0._source.document.source.@timestamp: '2020-12-12' } + - match: { hits.hits.0._source.document.source.foo: 'bar' } + - match: { hits.hits.0._source.error.type: 'fail_processor_exception' } + - match: { hits.hits.0._source.error.message: 'error_message' } + - contains: { hits.hits.0._source.error.stack_trace: 'org.elasticsearch.ingest.common.FailProcessorException: error_message' } + + - do: + indices.delete_data_stream: + name: logs-foobar + - is_true: acknowledged + + - do: + indices.delete: + index: .fs-logs-foobar-* + - is_true: acknowledged diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java index e42ddd41b0b0a..5e630bf9cdef5 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestModifier.java @@ -8,16 +8,22 @@ package org.elasticsearch.action.bulk; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SparseFixedBitSet; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateResponse; +import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Assertions; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.ingest.IngestService; +import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; @@ -30,8 +36,17 @@ import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; +/** + * Manages mutations to a bulk request that arise from the application of ingest pipelines. The modifier acts as an iterator over the + * documents of a bulk request, keeping a record of all dropped and failed write requests in the overall bulk operation. + * Once all pipelines have been applied, the modifier is used to create a new bulk request that will be used for executing the + * remaining writes. When this final bulk operation is completed, the modifier is used to combine the results with those from the + * ingest service to create the final bulk response. 
+ */ final class BulkRequestModifier implements Iterator<DocWriteRequest<?>> { + private static final Logger logger = LogManager.getLogger(BulkRequestModifier.class); + private static final String DROPPED_OR_FAILED_ITEM_WITH_AUTO_GENERATED_ID = "auto-generated"; final BulkRequest bulkRequest; @@ -58,6 +73,13 @@ public boolean hasNext() { return (currentSlot + 1) < bulkRequest.requests().size(); } + /** + * Creates a new bulk request containing all documents from the original bulk request that have not been marked as failed + * or dropped. Any failed or dropped documents are tracked as a side effect of this call so that they may be reflected in the + * final bulk response. + * + * @return A new bulk request without the write operations removed during any ingest pipeline executions. + */ BulkRequest getBulkRequest() { if (itemResponses.isEmpty()) { return bulkRequest; @@ -80,6 +102,15 @@ BulkRequest getBulkRequest() { } } + /** + * If documents were dropped or failed in ingest, this method wraps the action listener that will be notified when the + * updated bulk operation is completed. The wrapped listener combines the dropped and failed document results from the ingest + * service with the results returned from running the remaining write operations. + * + * @param ingestTookInMillis Time elapsed for ingestion to be passed to final result. + * @param actionListener The action listener that expects the final bulk response. + * @return An action listener that combines ingest failure results with the results from writing the remaining documents. + */ ActionListener<BulkResponse> wrapActionListenerIfNeeded(long ingestTookInMillis, ActionListener<BulkResponse> actionListener) { if (itemResponses.isEmpty()) { return actionListener.map( @@ -138,6 +169,11 @@ private void assertResponsesAreCorrect(BulkItemResponse[] bulkResponses, BulkIte } } + /** + * Mark the document at the given slot in the bulk request as having failed in the ingest service. + * @param slot the slot in the bulk request to mark as failed. + * @param e the failure encountered. + */ synchronized void markItemAsFailed(int slot, Exception e) { final DocWriteRequest<?> docWriteRequest = bulkRequest.requests().get(slot); final String id = Objects.requireNonNullElse(docWriteRequest.id(), DROPPED_OR_FAILED_ITEM_WITH_AUTO_GENERATED_ID); @@ -150,6 +186,10 @@ synchronized void markItemAsFailed(int slot, Exception e) { itemResponses.add(BulkItemResponse.failure(slot, docWriteRequest.opType(), failure)); } + /** + * Mark the document at the given slot in the bulk request as having been dropped by the ingest service. + * @param slot the slot in the bulk request to mark as dropped. + */ synchronized void markItemAsDropped(int slot) { final DocWriteRequest<?> docWriteRequest = bulkRequest.requests().get(slot); final String id = Objects.requireNonNullElse(docWriteRequest.id(), DROPPED_OR_FAILED_ITEM_WITH_AUTO_GENERATED_ID); @@ -164,4 +204,67 @@ synchronized void markItemAsDropped(int slot) { ); itemResponses.add(BulkItemResponse.success(slot, docWriteRequest.opType(), dropped)); } + + /** + * Mark the document at the given slot in the bulk request as having failed in the ingest service. The document will be redirected + * to a data stream's failure store. + * @param slot the slot in the bulk request to redirect. + * @param targetIndexName the index that the document was targeting at the time of failure. + * @param e the failure encountered.
+ */ + public void markItemForFailureStore(int slot, String targetIndexName, Exception e) { + if (DataStream.isFailureStoreEnabled() == false) { + // Assert false for development, but if we somehow find ourselves here, default to failure logic. + assert false + : "Attempting to route a failed write request type to a failure store but the failure store is not enabled! " + + "This should be guarded against in TransportBulkAction#shouldStoreFailure()"; + markItemAsFailed(slot, e); + } else { + // We get the index write request to find the source of the failed document + IndexRequest indexRequest = TransportBulkAction.getIndexWriteRequest(bulkRequest.requests().get(slot)); + if (indexRequest == null) { + // This is unlikely to happen ever since only source oriented operations (index, create, upsert) are considered for + // ingest, but if it does happen, attempt to trip an assertion. If running in production, be defensive: Mark it failed + // as normal, and log the info for later debugging if needed. + assert false + : "Attempting to mark invalid write request type for failure store. Only IndexRequest or UpdateRequest allowed. " + + "type: [" + + bulkRequest.requests().get(slot).getClass().getName() + + "], index: [" + + targetIndexName + + "]"; + markItemAsFailed(slot, e); + logger.debug( + () -> "Attempted to redirect an invalid write operation after ingest failure - type: [" + + bulkRequest.requests().get(slot).getClass().getName() + + "], index: [" + + targetIndexName + + "]" + ); + } else { + try { + IndexRequest errorDocument = FailureStoreDocument.transformFailedRequest(indexRequest, e, targetIndexName); + // This is a fresh index request! We need to do some preprocessing on it. If we do not, when this is returned to + // the bulk action, the action will see that it hasn't been processed by ingest yet and attempt to ingest it again. + errorDocument.isPipelineResolved(true); + errorDocument.setPipeline(IngestService.NOOP_PIPELINE_NAME); + errorDocument.setFinalPipeline(IngestService.NOOP_PIPELINE_NAME); + bulkRequest.requests.set(slot, errorDocument); + } catch (IOException ioException) { + // This is unlikely to happen because the conversion is so simple, but be defensive and attempt to report about it + // if we need the info later. + e.addSuppressed(ioException); // Prefer to return the original exception to the end user instead of this new one. + logger.debug( + () -> "Encountered exception while attempting to redirect a failed ingest operation: index [" + + targetIndexName + + "], source: [" + + indexRequest.source().utf8ToString() + + "]", + ioException + ); + markItemAsFailed(slot, e); + } + } + } + } } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocument.java b/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocument.java new file mode 100644 index 0000000000000..e0d6e8200e86d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/bulk/FailureStoreDocument.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.bulk; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.Objects; +import java.util.function.Supplier; + +/** + * Transforms an indexing request using error information into a new index request to be stored in a data stream's failure store. + */ +public final class FailureStoreDocument { + + private FailureStoreDocument() {} + + /** + * Combines an {@link IndexRequest} that has failed during the bulk process with the error thrown for that request. The result is a + * new {@link IndexRequest} that can be stored in a data stream's failure store. + * @param source The original request that has failed to be ingested + * @param exception The exception that was thrown that caused the request to fail to be ingested + * @param targetIndexName The index that the request was targeting at time of failure + * @return A new {@link IndexRequest} with a failure store compliant structure + * @throws IOException If there is a problem when the document's new source is serialized + */ + public static IndexRequest transformFailedRequest(IndexRequest source, Exception exception, String targetIndexName) throws IOException { + return transformFailedRequest(source, exception, targetIndexName, System::currentTimeMillis); + } + + /** + * Combines an {@link IndexRequest} that has failed during the bulk process with the error thrown for that request. The result is a + * new {@link IndexRequest} that can be stored in a data stream's failure store. 
+ * @param source The original request that has failed to be ingested + * @param exception The exception that was thrown that caused the request to fail to be ingested + * @param targetIndexName The index that the request was targeting at time of failure + * @param timeSupplier Supplies the value for the document's timestamp + * @return A new {@link IndexRequest} with a failure store compliant structure + * @throws IOException If there is a problem when the document's new source is serialized + */ + public static IndexRequest transformFailedRequest( + IndexRequest source, + Exception exception, + String targetIndexName, + Supplier timeSupplier + ) throws IOException { + return new IndexRequest().index(targetIndexName) + .source(createSource(source, exception, targetIndexName, timeSupplier)) + .opType(DocWriteRequest.OpType.CREATE) + .setWriteToFailureStore(true); + } + + private static XContentBuilder createSource( + IndexRequest source, + Exception exception, + String targetIndexName, + Supplier timeSupplier + ) throws IOException { + Objects.requireNonNull(source, "source must not be null"); + Objects.requireNonNull(exception, "exception must not be null"); + Objects.requireNonNull(targetIndexName, "targetIndexName must not be null"); + Objects.requireNonNull(timeSupplier, "timeSupplier must not be null"); + Throwable unwrapped = ExceptionsHelper.unwrapCause(exception); + XContentBuilder builder = JsonXContent.contentBuilder(); + builder.startObject(); + { + builder.timeField("@timestamp", timeSupplier.get()); + builder.startObject("document"); + { + if (source.id() != null) { + builder.field("id", source.id()); + } + if (source.routing() != null) { + builder.field("routing", source.routing()); + } + builder.field("index", source.index()); + // Unmapped source field + builder.startObject("source"); + { + builder.mapContents(source.sourceAsMap()); + } + builder.endObject(); + } + builder.endObject(); + builder.startObject("error"); + { + builder.field("type", ElasticsearchException.getExceptionName(unwrapped)); + builder.field("message", unwrapped.getMessage()); + builder.field("stack_trace", ExceptionsHelper.stackTrace(unwrapped)); + // Further fields not yet tracked (Need to expose via specific exceptions) + // - pipeline + // - pipeline_trace + // - processor + } + builder.endObject(); + } + builder.endObject(); + return builder; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index e33f3c71e0076..2f12008501487 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -36,10 +36,12 @@ import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.MetadataIndexTemplateService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; @@ -48,6 +50,7 @@ import org.elasticsearch.core.Assertions; import 
org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.index.VersionType; @@ -62,6 +65,7 @@ import java.util.HashMap; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.SortedMap; import java.util.concurrent.TimeUnit; @@ -316,7 +320,7 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec assert arePipelinesResolved : bulkRequest; } if (clusterService.localNode().isIngestNode()) { - processBulkIndexIngestRequest(task, bulkRequest, executorName, l); + processBulkIndexIngestRequest(task, bulkRequest, executorName, metadata, l); } else { ingestForwarder.forwardIngestRequest(bulkAction, bulkRequest, l); } @@ -624,6 +628,7 @@ private void processBulkIndexIngestRequest( Task task, BulkRequest original, String executorName, + Metadata metadata, ActionListener listener ) { final long ingestStartTimeInNanos = System.nanoTime(); @@ -632,6 +637,8 @@ private void processBulkIndexIngestRequest( original.numberOfActions(), () -> bulkRequestModifier, bulkRequestModifier::markItemAsDropped, + (indexName) -> shouldStoreFailure(indexName, metadata, threadPool.absoluteTimeInMillis()), + bulkRequestModifier::markItemForFailureStore, bulkRequestModifier::markItemAsFailed, (originalThread, exception) -> { if (exception != null) { @@ -679,4 +686,79 @@ public boolean isForceExecution() { ); } + /** + * Determines if an index name is associated with either an existing data stream or a template + * for one that has the failure store enabled. + * @param indexName The index name to check. + * @param metadata Cluster state metadata. + * @param epochMillis A timestamp to use when resolving date math in the index name. + * @return true if the given index name corresponds to a data stream with a failure store, + * or if it matches a template that has a data stream failure store enabled. + */ + static boolean shouldStoreFailure(String indexName, Metadata metadata, long epochMillis) { + return DataStream.isFailureStoreEnabled() + && resolveFailureStoreFromMetadata(indexName, metadata, epochMillis).or( + () -> resolveFailureStoreFromTemplate(indexName, metadata) + ).orElse(false); + } + + /** + * Determines if an index name is associated with an existing data stream that has a failure store enabled. + * @param indexName The index name to check. + * @param metadata Cluster state metadata. + * @param epochMillis A timestamp to use when resolving date math in the index name. + * @return true if the given index name corresponds to an existing data stream with a failure store enabled. + */ + private static Optional resolveFailureStoreFromMetadata(String indexName, Metadata metadata, long epochMillis) { + if (indexName == null) { + return Optional.empty(); + } + + // Get index abstraction, resolving date math if it exists + IndexAbstraction indexAbstraction = metadata.getIndicesLookup() + .get(IndexNameExpressionResolver.resolveDateMathExpression(indexName, epochMillis)); + + // We only store failures if the failure is being written to a data stream, + // not when directly writing to backing indices/failure stores + if (indexAbstraction == null || indexAbstraction.isDataStreamRelated() == false) { + return Optional.empty(); + } + + // Locate the write index for the abstraction, and check if it has a data stream associated with it. 
+ // This handles alias resolution as well as data stream resolution. + Index writeIndex = indexAbstraction.getWriteIndex(); + assert writeIndex != null : "Could not resolve write index for resource [" + indexName + "]"; + IndexAbstraction writeAbstraction = metadata.getIndicesLookup().get(writeIndex.getName()); + DataStream targetDataStream = writeAbstraction.getParentDataStream(); + + // We will store the failure if the write target belongs to a data stream with a failure store. + return Optional.of(targetDataStream != null && targetDataStream.isFailureStore()); + } + + /** + * Determines if an index name is associated with an index template that has a data stream failure store enabled. + * @param indexName The index name to check. + * @param metadata Cluster state metadata. + * @return true if the given index name corresponds to an index template with a data stream failure store enabled. + */ + private static Optional resolveFailureStoreFromTemplate(String indexName, Metadata metadata) { + if (indexName == null) { + return Optional.empty(); + } + + // Check to see if the index name matches any templates such that an index would have been attributed + // We don't check v1 templates at all because failure stores can only exist on data streams via a v2 template + String template = MetadataIndexTemplateService.findV2Template(metadata, indexName, false); + if (template != null) { + // Check if this is a data stream template or if it is just a normal index. + ComposableIndexTemplate composableIndexTemplate = metadata.templatesV2().get(template); + if (composableIndexTemplate.getDataStreamTemplate() != null) { + // Check if the data stream has the failure store enabled + return Optional.of(composableIndexTemplate.getDataStreamTemplate().hasFailureStore()); + } + } + + // Could not locate a failure store via template + return Optional.empty(); + } } diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index b1ad328abda92..13ae065844318 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.index; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; @@ -19,6 +20,7 @@ import org.elasticsearch.action.support.replication.ReplicatedWriteRequest; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.client.internal.Requests; +import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.IndexRouting; @@ -110,6 +112,11 @@ public class IndexRequest extends ReplicatedWriteRequest implement private boolean requireDataStream; + /** + * Transient flag denoting that the local request should be routed to a failure store. Not persisted across the wire. 
+ */ + private boolean writeToFailureStore = false; + /** * This indicates whether the response to this request ought to list the ingest pipelines that were executed on the document */ @@ -821,7 +828,25 @@ public IndexRequest setRequireDataStream(boolean requireDataStream) { @Override public Index getConcreteWriteIndex(IndexAbstraction ia, Metadata metadata) { - return ia.getWriteIndex(this, metadata); + if (DataStream.isFailureStoreEnabled() && writeToFailureStore) { + if (ia.isDataStreamRelated() == false) { + throw new ElasticsearchException( + "Attempting to write a document to a failure store but the targeted index is not a data stream" + ); + } + // Resolve write index and get parent data stream to handle the case of dealing with an alias + String defaultWriteIndexName = ia.getWriteIndex().getName(); + DataStream dataStream = metadata.getIndicesLookup().get(defaultWriteIndexName).getParentDataStream(); + if (dataStream.getFailureIndices().size() < 1) { + throw new ElasticsearchException( + "Attempting to write a document to a failure store but the target data stream does not have one enabled" + ); + } + return dataStream.getFailureIndices().get(dataStream.getFailureIndices().size() - 1); + } else { + // Resolve as normal + return ia.getWriteIndex(this, metadata); + } } @Override @@ -834,6 +859,15 @@ public IndexRequest setRequireAlias(boolean requireAlias) { return this; } + public boolean isWriteToFailureStore() { + return writeToFailureStore; + } + + public IndexRequest setWriteToFailureStore(boolean writeToFailureStore) { + this.writeToFailureStore = writeToFailureStore; + return this; + } + public IndexRequest setListExecutedPipelines(boolean listExecutedPipelines) { this.listExecutedPipelines = listExecutedPipelines; return this; diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 3a2a810dc61b5..1f82ebd786e98 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -41,6 +41,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.MasterServiceTaskQueue; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.TriConsumer; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; @@ -668,10 +669,41 @@ void validatePipeline(Map ingestInfos, String pipelin ExceptionsHelper.rethrowAndSuppress(exceptions); } + private record IngestPipelinesExecutionResult(boolean success, boolean shouldKeep, Exception exception, String failedIndex) { + + private static final IngestPipelinesExecutionResult SUCCESSFUL_RESULT = new IngestPipelinesExecutionResult(true, true, null, null); + private static final IngestPipelinesExecutionResult DISCARD_RESULT = new IngestPipelinesExecutionResult(true, false, null, null); + private static IngestPipelinesExecutionResult failAndStoreFor(String index, Exception e) { + return new IngestPipelinesExecutionResult(false, true, e, index); + } + } + + /** + * Executes all applicable pipelines for a collection of documents. + * @param numberOfActionRequests The total number of requests to process. + * @param actionRequests The collection of requests to be processed. + * @param onDropped A callback executed when a document is dropped by a pipeline. + * Accepts the slot in the collection of requests that the document occupies. 
+ * @param shouldStoreFailure A predicate executed on each ingest failure to determine if the + * failure should be stored somewhere. + * @param onStoreFailure A callback executed when a document fails ingest but the failure should + * be persisted elsewhere. Accepts the slot in the collection of requests + * that the document occupies, the index name that the request was targeting + * at the time of failure, and the exception that the document encountered. + * @param onFailure A callback executed when a document fails ingestion and does not need to be + * persisted. Accepts the slot in the collection of requests that the document + * occupies, and the exception that the document encountered. + * @param onCompletion A callback executed once all documents have been processed. Accepts the thread + * that ingestion completed on or an exception in the event that the entire operation + * has failed. + * @param executorName Which executor the bulk request should be executed on. + */ public void executeBulkRequest( final int numberOfActionRequests, final Iterable> actionRequests, final IntConsumer onDropped, + final Predicate shouldStoreFailure, + final TriConsumer onStoreFailure, final BiConsumer onFailure, final BiConsumer onCompletion, final String executorName @@ -708,34 +740,45 @@ protected void doRun() { totalMetrics.preIngest(); final int slot = i; final Releasable ref = refs.acquire(); + DocumentParsingObserver documentParsingObserver = documentParsingObserverSupplier.get(); + final IngestDocument ingestDocument = newIngestDocument(indexRequest, documentParsingObserver); + final org.elasticsearch.script.Metadata originalDocumentMetadata = ingestDocument.getMetadata().clone(); // the document listener gives us three-way logic: a document can fail processing (1), or it can // be successfully processed. a successfully processed document can be kept (2) or dropped (3). 
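// Editor's note — a minimal, self-contained sketch (not part of this patch) of the dispatch
// that the reworked document listener below performs. The Result record stands in for the
// IngestPipelinesExecutionResult introduced above, and TriConsumer is redeclared locally in
// place of org.elasticsearch.common.TriConsumer. In the patch itself the "failed outright"
// case arrives through the listener's onFailure method rather than through the result object;
// it is collapsed into a single method here for clarity.

import java.util.function.BiConsumer;
import java.util.function.IntConsumer;

class DocumentListenerDispatchSketch {

    interface TriConsumer<A, B, C> {
        void apply(A a, B b, C c);
    }

    record Result(boolean success, boolean shouldKeep, Exception exception, String failedIndex) {}

    static void dispatch(
        int slot,
        Result result,
        IntConsumer onDropped,
        TriConsumer<Integer, String, Exception> onStoreFailure,
        BiConsumer<Integer, Exception> onFailure
    ) {
        if (result.success()) {
            if (result.shouldKeep() == false) {
                onDropped.accept(slot); // dropped by a pipeline (e.g. a drop processor)
            }
            // kept documents simply continue on to indexing
        } else if (result.failedIndex() != null) {
            // ingest failed, but the failure can be redirected to a failure store
            onStoreFailure.apply(slot, result.failedIndex(), result.exception());
        } else {
            onFailure.accept(slot, result.exception()); // ingest failed with nowhere to redirect
        }
    }
}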
- final ActionListener documentListener = ActionListener.runAfter(new ActionListener<>() { - @Override - public void onResponse(Boolean kept) { - assert kept != null; - if (kept == false) { - onDropped.accept(slot); + final ActionListener documentListener = ActionListener.runAfter( + new ActionListener<>() { + @Override + public void onResponse(IngestPipelinesExecutionResult result) { + assert result != null; + if (result.success) { + if (result.shouldKeep == false) { + onDropped.accept(slot); + } + } else { + // We were given a failure result in the onResponse method, so we must store the failure + // Recover the original document state, track a failed ingest, and pass it along + updateIndexRequestMetadata(indexRequest, originalDocumentMetadata); + totalMetrics.ingestFailed(); + onStoreFailure.apply(slot, result.failedIndex, result.exception); + } } - } - @Override - public void onFailure(Exception e) { - totalMetrics.ingestFailed(); - onFailure.accept(slot, e); + @Override + public void onFailure(Exception e) { + totalMetrics.ingestFailed(); + onFailure.accept(slot, e); + } + }, + () -> { + // regardless of success or failure, we always stop the ingest "stopwatch" and release the ref to indicate + // that we're finished with this document + final long ingestTimeInNanos = System.nanoTime() - startTimeInNanos; + totalMetrics.postIngest(ingestTimeInNanos); + ref.close(); } - }, () -> { - // regardless of success or failure, we always stop the ingest "stopwatch" and release the ref to indicate - // that we're finished with this document - final long ingestTimeInNanos = System.nanoTime() - startTimeInNanos; - totalMetrics.postIngest(ingestTimeInNanos); - ref.close(); - }); - DocumentParsingObserver documentParsingObserver = documentParsingObserverSupplier.get(); - - IngestDocument ingestDocument = newIngestDocument(indexRequest, documentParsingObserver); + ); - executePipelines(pipelines, indexRequest, ingestDocument, documentListener); + executePipelines(pipelines, indexRequest, ingestDocument, shouldStoreFailure, documentListener); indexRequest.setPipelinesHaveRun(); assert actionRequest.index() != null; @@ -825,7 +868,8 @@ private void executePipelines( final PipelineIterator pipelines, final IndexRequest indexRequest, final IngestDocument ingestDocument, - final ActionListener listener + final Predicate shouldStoreFailure, + final ActionListener listener ) { assert pipelines.hasNext(); PipelineSlot slot = pipelines.next(); @@ -835,13 +879,20 @@ private void executePipelines( // reset the reroute flag, at the start of a new pipeline execution this document hasn't been rerouted yet ingestDocument.resetReroute(); + final String originalIndex = indexRequest.indices()[0]; + final Consumer exceptionHandler = (Exception e) -> { + if (shouldStoreFailure.test(originalIndex)) { + listener.onResponse(IngestPipelinesExecutionResult.failAndStoreFor(originalIndex, e)); + } else { + listener.onFailure(e); + } + }; try { if (pipeline == null) { throw new IllegalArgumentException("pipeline with id [" + pipelineId + "] does not exist"); } indexRequest.addPipeline(pipelineId); - final String originalIndex = indexRequest.indices()[0]; executePipeline(ingestDocument, pipeline, (keep, e) -> { assert keep != null; @@ -855,12 +906,12 @@ private void executePipelines( ), e ); - listener.onFailure(e); + exceptionHandler.accept(e); return; // document failed! } if (keep == false) { - listener.onResponse(false); + listener.onResponse(IngestPipelinesExecutionResult.DISCARD_RESULT); return; // document dropped! 
} @@ -875,7 +926,7 @@ private void executePipelines( } catch (IllegalArgumentException ex) { // An IllegalArgumentException can be thrown when an ingest processor creates a source map that is self-referencing. // In that case, we catch and wrap the exception, so we can include more details - listener.onFailure( + exceptionHandler.accept( new IllegalArgumentException( format( "Failed to generate the source document for ingest pipeline [%s] for document [%s/%s]", @@ -895,7 +946,7 @@ private void executePipelines( if (Objects.equals(originalIndex, newIndex) == false) { // final pipelines cannot change the target index (either directly or by way of a reroute) if (isFinalPipeline) { - listener.onFailure( + exceptionHandler.accept( new IllegalStateException( format( "final pipeline [%s] can't change the target index (from [%s] to [%s]) for document [%s]", @@ -914,7 +965,7 @@ private void executePipelines( if (cycle) { List indexCycle = new ArrayList<>(ingestDocument.getIndexHistory()); indexCycle.add(newIndex); - listener.onFailure( + exceptionHandler.accept( new IllegalStateException( format( "index cycle detected while processing pipeline [%s] for document [%s]: %s", @@ -941,12 +992,12 @@ private void executePipelines( } if (newPipelines.hasNext()) { - executePipelines(newPipelines, indexRequest, ingestDocument, listener); + executePipelines(newPipelines, indexRequest, ingestDocument, shouldStoreFailure, listener); } else { // update the index request's source and (potentially) cache the timestamp for TSDB updateIndexRequestSource(indexRequest, ingestDocument); cacheRawTimestamp(indexRequest, ingestDocument); - listener.onResponse(true); // document succeeded! + listener.onResponse(IngestPipelinesExecutionResult.SUCCESSFUL_RESULT); // document succeeded! } }); } catch (Exception e) { @@ -954,7 +1005,7 @@ private void executePipelines( () -> format("failed to execute pipeline [%s] for document [%s/%s]", pipelineId, indexRequest.index(), indexRequest.id()), e ); - listener.onFailure(e); // document failed! + exceptionHandler.accept(e); // document failed } } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentTests.java b/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentTests.java new file mode 100644 index 0000000000000..92fa67e9a6ffc --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentTests.java @@ -0,0 +1,69 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */
+
+package org.elasticsearch.action.bulk;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.transport.RemoteTransportException;
+import org.elasticsearch.xcontent.ObjectPath;
+import org.elasticsearch.xcontent.json.JsonXContent;
+
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.nullValue;
+import static org.hamcrest.CoreMatchers.startsWith;
+
+public class FailureStoreDocumentTests extends ESTestCase {
+
+    public void testFailureStoreDocumentConversion() throws Exception {
+        IndexRequest source = new IndexRequest("original_index").routing("fake_routing")
+            .id("1")
+            .source(JsonXContent.contentBuilder().startObject().field("key", "value").endObject());
+
+        // The exception will be wrapped for the test to make sure the converter correctly unwraps it
+        Exception exception = new ElasticsearchException("Test exception please ignore");
+        exception = new RemoteTransportException("Test exception wrapper, please ignore", exception);
+
+        String targetIndexName = "rerouted_index";
+        long testTime = 1702357200000L; // 2023-12-12T05:00:00.000Z
+
+        IndexRequest convertedRequest = FailureStoreDocument.transformFailedRequest(source, exception, targetIndexName, () -> testTime);
+
+        // Retargeting write
+        assertThat(convertedRequest.id(), is(nullValue()));
+        assertThat(convertedRequest.routing(), is(nullValue()));
+        assertThat(convertedRequest.index(), is(equalTo(targetIndexName)));
+        assertThat(convertedRequest.opType(), is(DocWriteRequest.OpType.CREATE));
+
+        // Original document content is no longer in the same place
+        assertThat("Expected original document to be modified", convertedRequest.sourceAsMap().get("key"), is(nullValue()));
+
+        // Assert document contents
+        assertThat(ObjectPath.eval("@timestamp", convertedRequest.sourceAsMap()), is(equalTo("2023-12-12T05:00:00.000Z")));
+
+        assertThat(ObjectPath.eval("document.id", convertedRequest.sourceAsMap()), is(equalTo("1")));
+        assertThat(ObjectPath.eval("document.routing", convertedRequest.sourceAsMap()), is(equalTo("fake_routing")));
+        assertThat(ObjectPath.eval("document.index", convertedRequest.sourceAsMap()), is(equalTo("original_index")));
+        assertThat(ObjectPath.eval("document.source.key", convertedRequest.sourceAsMap()), is(equalTo("value")));
+
+        assertThat(ObjectPath.eval("error.type", convertedRequest.sourceAsMap()), is(equalTo("exception")));
+        assertThat(ObjectPath.eval("error.message", convertedRequest.sourceAsMap()), is(equalTo("Test exception please ignore")));
+        assertThat(
+            ObjectPath.eval("error.stack_trace", convertedRequest.sourceAsMap()),
+            startsWith(
+                "org.elasticsearch.ElasticsearchException: Test exception please ignore\n"
+                    + "\tat org.elasticsearch.action.bulk.FailureStoreDocumentTests.testFailureStoreDocumentConversion"
+            )
+        );
+
+        assertThat(convertedRequest.isWriteToFailureStore(), is(true));
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java
index 188adf396435f..564cf74697194 100644
--- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java
@@ -31,6 +31,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.TriConsumer;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.AtomicArray;
 import org.elasticsearch.core.Nullable;
@@ -54,13 +55,16 @@ import org.mockito.Captor;
 import org.mockito.MockitoAnnotations;
 
+import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.function.BiConsumer;
+import java.util.function.Predicate;
 
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.sameInstance;
@@ -82,6 +86,7 @@ public class TransportBulkActionIngestTests extends ESTestCase {
      */
     private static final String WITH_DEFAULT_PIPELINE = "index_with_default_pipeline";
     private static final String WITH_DEFAULT_PIPELINE_ALIAS = "alias_for_index_with_default_pipeline";
+    private static final String WITH_FAILURE_STORE_ENABLED = "data-stream-failure-store-enabled";
 
     private static final Settings SETTINGS = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), true).build();
@@ -95,6 +100,10 @@ public class TransportBulkActionIngestTests extends ESTestCase {
     /** Arguments to callbacks we want to capture, but which require generics, so we must use @Captor */
     @Captor
+    ArgumentCaptor<Predicate<String>> redirectPredicate;
+    @Captor
+    ArgumentCaptor<TriConsumer<Integer, String, Exception>> redirectHandler;
+    @Captor
     ArgumentCaptor<BiConsumer<Integer, Exception>> failureHandler;
     @Captor
     ArgumentCaptor<BiConsumer<Thread, Exception>> completionHandler;
@@ -174,7 +183,7 @@ class TestSingleItemBulkWriteAction extends TransportSingleItemBulkWriteAction> req = bulkDocsItr.getValue().iterator();
         failureHandler.getValue().accept(0, exception); // have an exception for our one index request
         indexRequest2.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing
-        completionHandler.getValue().accept(DUMMY_WRITE_THREAD, null);
+        assertTrue(redirectPredicate.getValue().test(WITH_FAILURE_STORE_ENABLED + "-1")); // ensure redirects on failure store data stream
+        assertFalse(redirectPredicate.getValue().test(WITH_DEFAULT_PIPELINE)); // no redirects for random existing indices
+        assertFalse(redirectPredicate.getValue().test("index")); // no redirects for non-existent indices with no templates
+        redirectHandler.getValue().apply(2, WITH_FAILURE_STORE_ENABLED + "-1", exception); // exception and redirect for request 3 (slot 2)
+        completionHandler.getValue().accept(DUMMY_WRITE_THREAD, null); // all ingestion completed
         assertTrue(action.isExecuted);
         assertFalse(responseCalled.get()); // listener would only be called by real index action, not our mocked one
         verifyNoMoreInteractions(transportService);
@@ -322,6 +350,8 @@ public void testSingleItemBulkActionIngestLocal() throws Exception {
             eq(1),
             bulkDocsItr.capture(),
             any(),
+            any(),
+            any(),
             failureHandler.capture(),
             completionHandler.capture(),
             eq(Names.WRITE)
@@ -368,6 +398,8 @@ public void testIngestSystemLocal() throws Exception {
             eq(bulkRequest.numberOfActions()),
             bulkDocsItr.capture(),
             any(),
+            any(),
+            any(),
             failureHandler.capture(),
             completionHandler.capture(),
             eq(Names.SYSTEM_WRITE)
@@ -401,7 +433,7 @@ public void testIngestForward() throws Exception {
         ActionTestUtils.execute(action, null, bulkRequest, listener);
 
         // should not have executed ingest locally -
verify(ingestService, never()).executeBulkRequest(anyInt(), any(), any(), any(), any(), any()); + verify(ingestService, never()).executeBulkRequest(anyInt(), any(), any(), any(), any(), any(), any(), any()); // but instead should have sent to a remote node with the transport service ArgumentCaptor node = ArgumentCaptor.forClass(DiscoveryNode.class); verify(transportService).sendRequest(node.capture(), eq(BulkAction.NAME), any(), remoteResponseHandler.capture()); @@ -441,7 +473,7 @@ public void testSingleItemBulkActionIngestForward() throws Exception { ActionTestUtils.execute(singleItemBulkWriteAction, null, indexRequest, listener); // should not have executed ingest locally - verify(ingestService, never()).executeBulkRequest(anyInt(), any(), any(), any(), any(), any()); + verify(ingestService, never()).executeBulkRequest(anyInt(), any(), any(), any(), any(), any(), any(), any()); // but instead should have sent to a remote node with the transport service ArgumentCaptor node = ArgumentCaptor.forClass(DiscoveryNode.class); verify(transportService).sendRequest(node.capture(), eq(BulkAction.NAME), any(), remoteResponseHandler.capture()); @@ -525,6 +557,8 @@ private void validatePipelineWithBulkUpsert(@Nullable String indexRequestIndexNa eq(bulkRequest.numberOfActions()), bulkDocsItr.capture(), any(), + any(), + any(), failureHandler.capture(), completionHandler.capture(), eq(Names.WRITE) @@ -573,6 +607,8 @@ public void testDoExecuteCalledTwiceCorrectly() throws Exception { eq(1), bulkDocsItr.capture(), any(), + any(), + any(), failureHandler.capture(), completionHandler.capture(), eq(Names.WRITE) @@ -667,6 +703,8 @@ public void testFindDefaultPipelineFromTemplateMatch() { eq(1), bulkDocsItr.capture(), any(), + any(), + any(), failureHandler.capture(), completionHandler.capture(), eq(Names.WRITE) @@ -705,6 +743,8 @@ public void testFindDefaultPipelineFromV2TemplateMatch() { eq(1), bulkDocsItr.capture(), any(), + any(), + any(), failureHandler.capture(), completionHandler.capture(), eq(Names.WRITE) @@ -732,6 +772,8 @@ public void testIngestCallbackExceptionHandled() throws Exception { eq(bulkRequest.numberOfActions()), bulkDocsItr.capture(), any(), + any(), + any(), failureHandler.capture(), completionHandler.capture(), eq(Names.WRITE) @@ -769,6 +811,8 @@ private void validateDefaultPipeline(IndexRequest indexRequest) { eq(1), bulkDocsItr.capture(), any(), + any(), + any(), failureHandler.capture(), completionHandler.capture(), eq(Names.WRITE) diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index c3a1747902893..6f3767892e7a4 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -21,7 +21,9 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexAbstraction.ConcreteIndex; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -52,6 +54,7 @@ import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.SortedMap; 
import java.util.TreeMap; import java.util.concurrent.TimeUnit; @@ -61,6 +64,7 @@ import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.junit.Assume.assumeThat; public class TransportBulkActionTests extends ESTestCase { @@ -336,6 +340,100 @@ public void testRejectionAfterCreateIndexIsPropagated() throws Exception { } } + public void testResolveFailureStoreFromMetadata() throws Exception { + assumeThat(DataStream.isFailureStoreEnabled(), is(true)); + + String dataStreamWithFailureStore = "test-data-stream-failure-enabled"; + String dataStreamWithoutFailureStore = "test-data-stream-failure-disabled"; + long testTime = randomMillisUpToYear9999(); + + IndexMetadata backingIndex1 = DataStreamTestHelper.createFirstBackingIndex(dataStreamWithFailureStore, testTime).build(); + IndexMetadata backingIndex2 = DataStreamTestHelper.createFirstBackingIndex(dataStreamWithoutFailureStore, testTime).build(); + IndexMetadata failureStoreIndex1 = DataStreamTestHelper.createFirstFailureStore(dataStreamWithFailureStore, testTime).build(); + + Metadata metadata = Metadata.builder() + .dataStreams( + Map.of( + dataStreamWithFailureStore, + DataStreamTestHelper.newInstance( + dataStreamWithFailureStore, + List.of(backingIndex1.getIndex()), + 1L, + Map.of(), + false, + null, + List.of(failureStoreIndex1.getIndex()) + ), + dataStreamWithoutFailureStore, + DataStreamTestHelper.newInstance( + dataStreamWithoutFailureStore, + List.of(backingIndex2.getIndex()), + 1L, + Map.of(), + false, + null, + List.of() + ) + ), + Map.of() + ) + .indices( + Map.of( + backingIndex1.getIndex().getName(), + backingIndex1, + backingIndex2.getIndex().getName(), + backingIndex2, + failureStoreIndex1.getIndex().getName(), + failureStoreIndex1 + ) + ) + .build(); + + // Data stream with failure store should store failures + assertThat(TransportBulkAction.shouldStoreFailure(dataStreamWithFailureStore, metadata, testTime), is(true)); + // Data stream without failure store should not + assertThat(TransportBulkAction.shouldStoreFailure(dataStreamWithoutFailureStore, metadata, testTime), is(false)); + // An index should not be considered for failure storage + assertThat(TransportBulkAction.shouldStoreFailure(backingIndex1.getIndex().getName(), metadata, testTime), is(false)); + // even if that index is itself a failure store + assertThat(TransportBulkAction.shouldStoreFailure(failureStoreIndex1.getIndex().getName(), metadata, testTime), is(false)); + } + + public void testResolveFailureStoreFromTemplate() throws Exception { + assumeThat(DataStream.isFailureStoreEnabled(), is(true)); + + String dsTemplateWithFailureStore = "test-data-stream-failure-enabled"; + String dsTemplateWithoutFailureStore = "test-data-stream-failure-disabled"; + String indexTemplate = "test-index"; + long testTime = randomMillisUpToYear9999(); + + Metadata metadata = Metadata.builder() + .indexTemplates( + Map.of( + dsTemplateWithFailureStore, + ComposableIndexTemplate.builder() + .indexPatterns(List.of(dsTemplateWithFailureStore + "-*")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, true)) + .build(), + dsTemplateWithoutFailureStore, + ComposableIndexTemplate.builder() + .indexPatterns(List.of(dsTemplateWithoutFailureStore + "-*")) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, false)) + .build(), + indexTemplate, + 
ComposableIndexTemplate.builder().indexPatterns(List.of(indexTemplate + "-*")).build() + ) + ) + .build(); + + // Data stream with failure store should store failures + assertThat(TransportBulkAction.shouldStoreFailure(dsTemplateWithFailureStore + "-1", metadata, testTime), is(true)); + // Data stream without failure store should not + assertThat(TransportBulkAction.shouldStoreFailure(dsTemplateWithoutFailureStore + "-1", metadata, testTime), is(false)); + // An index template should not be considered for failure storage + assertThat(TransportBulkAction.shouldStoreFailure(indexTemplate + "-1", metadata, testTime), is(false)); + } + private BulkRequest buildBulkRequest(List indices) { BulkRequest request = new BulkRequest(); for (String index : indices) { diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index d345197d88a23..26aa5b1e0454f 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -40,6 +40,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils; +import org.elasticsearch.common.TriConsumer; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; @@ -91,6 +92,7 @@ import java.util.function.Consumer; import java.util.function.IntConsumer; import java.util.function.LongSupplier; +import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -208,7 +210,16 @@ public void testExecuteIndexPipelineDoesNotExist() { @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); - ingestService.executeBulkRequest(1, List.of(indexRequest), indexReq -> {}, failureHandler, completionHandler, Names.WRITE); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + failureHandler, + completionHandler, + Names.WRITE + ); assertTrue(failure.get()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1111,6 +1122,8 @@ public String getType() { bulkRequest.numberOfActions(), bulkRequest.requests(), indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, Names.WRITE @@ -1154,6 +1167,8 @@ public void testExecuteBulkPipelineDoesNotExist() { bulkRequest.numberOfActions(), bulkRequest.requests(), indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, Names.WRITE @@ -1218,6 +1233,8 @@ public void close() { bulkRequest.numberOfActions(), bulkRequest.requests(), indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, Names.WRITE @@ -1247,7 +1264,16 @@ public void testExecuteSuccess() { final BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); - ingestService.executeBulkRequest(1, List.of(indexRequest), indexReq -> {}, failureHandler, completionHandler, Names.WRITE); + ingestService.executeBulkRequest( + 1, + 
List.of(indexRequest), + indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + failureHandler, + completionHandler, + Names.WRITE + ); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } @@ -1280,7 +1306,16 @@ public void testDynamicTemplates() throws Exception { CountDownLatch latch = new CountDownLatch(1); final BiConsumer failureHandler = (v, e) -> { throw new AssertionError("must never fail", e); }; final BiConsumer completionHandler = (t, e) -> latch.countDown(); - ingestService.executeBulkRequest(1, List.of(indexRequest), indexReq -> {}, failureHandler, completionHandler, Names.WRITE); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + failureHandler, + completionHandler, + Names.WRITE + ); latch.await(); assertThat(indexRequest.getDynamicTemplates(), equalTo(Map.of("foo", "bar", "foo.bar", "baz"))); } @@ -1301,7 +1336,16 @@ public void testExecuteEmptyPipeline() throws Exception { final BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); - ingestService.executeBulkRequest(1, List.of(indexRequest), indexReq -> {}, failureHandler, completionHandler, Names.WRITE); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + failureHandler, + completionHandler, + Names.WRITE + ); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } @@ -1355,7 +1399,16 @@ public void testExecutePropagateAllMetadataUpdates() throws Exception { final BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); - ingestService.executeBulkRequest(1, List.of(indexRequest), indexReq -> {}, failureHandler, completionHandler, Names.WRITE); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + failureHandler, + completionHandler, + Names.WRITE + ); verify(processor).execute(any(), any()); verify(failureHandler, never()).accept(any(), any()); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1404,7 +1457,16 @@ public void testExecuteFailure() throws Exception { final BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); - ingestService.executeBulkRequest(1, List.of(indexRequest), indexReq -> {}, failureHandler, completionHandler, Names.WRITE); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + failureHandler, + completionHandler, + Names.WRITE + ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class)); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1453,7 +1515,16 @@ public void testExecuteSuccessWithOnFailure() throws Exception { final BiConsumer failureHandler = mock(BiConsumer.class); 
@SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); - ingestService.executeBulkRequest(1, List.of(indexRequest), indexReq -> {}, failureHandler, completionHandler, Names.WRITE); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + failureHandler, + completionHandler, + Names.WRITE + ); verify(failureHandler, never()).accept(eq(0), any(IngestProcessorException.class)); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } @@ -1496,7 +1567,16 @@ public void testExecuteFailureWithNestedOnFailure() throws Exception { final BiConsumer failureHandler = mock(BiConsumer.class); @SuppressWarnings("unchecked") final BiConsumer completionHandler = mock(BiConsumer.class); - ingestService.executeBulkRequest(1, List.of(indexRequest), indexReq -> {}, failureHandler, completionHandler, Names.WRITE); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + failureHandler, + completionHandler, + Names.WRITE + ); verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); verify(failureHandler, times(1)).accept(eq(0), any(RuntimeException.class)); verify(completionHandler, times(1)).accept(Thread.currentThread(), null); @@ -1554,6 +1634,8 @@ public void testBulkRequestExecutionWithFailures() throws Exception { numRequest, bulkRequest.requests(), indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), requestItemErrorHandler, completionHandler, Names.WRITE @@ -1563,6 +1645,184 @@ public void testBulkRequestExecutionWithFailures() throws Exception { verify(completionHandler, times(1)).accept(Thread.currentThread(), null); } + public void testExecuteFailureRedirection() throws Exception { + final CompoundProcessor processor = mockCompoundProcessor(); + IngestService ingestService = createWithProcessors( + Map.of( + "mock", + (factories, tag, description, config) -> processor, + "set", + (factories, tag, description, config) -> new FakeProcessor("set", "", "", (ingestDocument) -> fail()) + ) + ); + PutPipelineRequest putRequest1 = new PutPipelineRequest( + "_id1", + new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), + XContentType.JSON + ); + // given that set -> fail() above, it's a failure if a document executes against this pipeline + PutPipelineRequest putRequest2 = new PutPipelineRequest( + "_id2", + new BytesArray("{\"processors\": [{\"set\" : {}}]}"), + XContentType.JSON + ); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty + ClusterState previousClusterState = clusterState; + clusterState = executePut(putRequest1, clusterState); + clusterState = executePut(putRequest2, clusterState); + ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); + final IndexRequest indexRequest = new IndexRequest("_index").id("_id") + .source(Map.of()) + .setPipeline("_id1") + .setFinalPipeline("_id2"); + doThrow(new RuntimeException()).when(processor) + .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); + final Predicate redirectCheck = (idx) -> indexRequest.index().equals(idx); + @SuppressWarnings("unchecked") + final TriConsumer redirectHandler = mock(TriConsumer.class); + 
@SuppressWarnings("unchecked") + final BiConsumer failureHandler = mock(BiConsumer.class); + @SuppressWarnings("unchecked") + final BiConsumer completionHandler = mock(BiConsumer.class); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + redirectCheck, + redirectHandler, + failureHandler, + completionHandler, + Names.WRITE + ); + verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); + verify(redirectHandler, times(1)).apply(eq(0), eq(indexRequest.index()), any(RuntimeException.class)); + verifyNoInteractions(failureHandler); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); + } + + public void testExecuteFailureRedirectionWithNestedOnFailure() throws Exception { + final Processor processor = mock(Processor.class); + when(processor.isAsync()).thenReturn(true); + final Processor onFailureProcessor = mock(Processor.class); + when(onFailureProcessor.isAsync()).thenReturn(true); + final Processor onFailureOnFailureProcessor = mock(Processor.class); + when(onFailureOnFailureProcessor.isAsync()).thenReturn(true); + final List processors = List.of(onFailureProcessor); + final List onFailureProcessors = List.of(onFailureOnFailureProcessor); + final CompoundProcessor compoundProcessor = new CompoundProcessor( + false, + List.of(processor), + List.of(new CompoundProcessor(false, processors, onFailureProcessors)) + ); + IngestService ingestService = createWithProcessors(Map.of("mock", (factories, tag, description, config) -> compoundProcessor)); + PutPipelineRequest putRequest = new PutPipelineRequest( + "_id", + new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), + XContentType.JSON + ); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty + ClusterState previousClusterState = clusterState; + clusterState = executePut(putRequest, clusterState); + ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); + final IndexRequest indexRequest = new IndexRequest("_index").id("_id") + .source(Map.of()) + .setPipeline("_id") + .setFinalPipeline("_none"); + doThrow(new RuntimeException()).when(onFailureOnFailureProcessor) + .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); + doThrow(new RuntimeException()).when(onFailureProcessor) + .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); + doThrow(new RuntimeException()).when(processor) + .execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); + final Predicate redirectPredicate = (idx) -> indexRequest.index().equals(idx); + @SuppressWarnings("unchecked") + final TriConsumer redirectHandler = mock(TriConsumer.class); + @SuppressWarnings("unchecked") + final BiConsumer failureHandler = mock(BiConsumer.class); + @SuppressWarnings("unchecked") + final BiConsumer completionHandler = mock(BiConsumer.class); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + redirectPredicate, + redirectHandler, + failureHandler, + completionHandler, + Names.WRITE + ); + verify(processor).execute(eqIndexTypeId(indexRequest.version(), indexRequest.versionType(), Map.of()), any()); + verify(redirectHandler, times(1)).apply(eq(0), eq(indexRequest.index()), any(RuntimeException.class)); + verifyNoInteractions(failureHandler); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); + } + + public void 
testBulkRequestExecutionWithRedirectedFailures() throws Exception { + BulkRequest bulkRequest = new BulkRequest(); + String pipelineId = "_id"; + + int numRequest = scaledRandomIntBetween(8, 64); + int numIndexRequests = 0; + for (int i = 0; i < numRequest; i++) { + DocWriteRequest request; + if (randomBoolean()) { + if (randomBoolean()) { + request = new DeleteRequest("_index", "_id"); + } else { + request = new UpdateRequest("_index", "_id"); + } + } else { + IndexRequest indexRequest = new IndexRequest("_index").id("_id").setPipeline(pipelineId).setFinalPipeline("_none"); + indexRequest.source(Requests.INDEX_CONTENT_TYPE, "field1", "value1"); + request = indexRequest; + numIndexRequests++; + } + bulkRequest.add(request); + } + + CompoundProcessor processor = mock(CompoundProcessor.class); + when(processor.isAsync()).thenReturn(true); + when(processor.getProcessors()).thenReturn(List.of(mock(Processor.class))); + Exception error = new RuntimeException(); + doAnswer(args -> { + @SuppressWarnings("unchecked") + BiConsumer handler = (BiConsumer) args.getArguments()[1]; + handler.accept(null, error); + return null; + }).when(processor).execute(any(), any()); + IngestService ingestService = createWithProcessors(Map.of("mock", (factories, tag, description, config) -> processor)); + PutPipelineRequest putRequest = new PutPipelineRequest( + "_id", + new BytesArray("{\"processors\": [{\"mock\" : {}}]}"), + XContentType.JSON + ); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty + ClusterState previousClusterState = clusterState; + clusterState = executePut(putRequest, clusterState); + ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); + + @SuppressWarnings("unchecked") + TriConsumer requestItemRedirectHandler = mock(TriConsumer.class); + @SuppressWarnings("unchecked") + BiConsumer requestItemErrorHandler = mock(BiConsumer.class); + @SuppressWarnings("unchecked") + final BiConsumer completionHandler = mock(BiConsumer.class); + ingestService.executeBulkRequest( + numRequest, + bulkRequest.requests(), + indexReq -> {}, + (s) -> true, + requestItemRedirectHandler, + requestItemErrorHandler, + completionHandler, + Names.WRITE + ); + + verify(requestItemRedirectHandler, times(numIndexRequests)).apply(anyInt(), anyString(), argThat(e -> e.getCause().equals(error))); + verifyNoInteractions(requestItemErrorHandler); + verify(completionHandler, times(1)).accept(Thread.currentThread(), null); + } + public void testBulkRequestExecution() throws Exception { BulkRequest bulkRequest = new BulkRequest(); String pipelineId = "_id"; @@ -1612,6 +1872,8 @@ public void testBulkRequestExecution() throws Exception { numRequest, bulkRequest.requests(), indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), requestItemErrorHandler, completionHandler, Names.WRITE @@ -1721,7 +1983,16 @@ public String execute() { final IndexRequest indexRequest = new IndexRequest("_index"); indexRequest.setPipeline("_id1").setFinalPipeline("_id2"); indexRequest.source(randomAlphaOfLength(10), randomAlphaOfLength(10)); - ingestService.executeBulkRequest(1, List.of(indexRequest), indexReq -> {}, (integer, e) -> {}, (thread, e) -> {}, Names.WRITE); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + (integer, e) -> {}, + (thread, e) -> {}, + Names.WRITE + ); { final IngestStats 
ingestStats = ingestService.stats(); @@ -1792,7 +2063,16 @@ public void testStats() throws Exception { final IndexRequest indexRequest = new IndexRequest("_index"); indexRequest.setPipeline("_id1").setFinalPipeline("_none"); indexRequest.source(randomAlphaOfLength(10), randomAlphaOfLength(10)); - ingestService.executeBulkRequest(1, List.of(indexRequest), indexReq -> {}, failureHandler, completionHandler, Names.WRITE); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + failureHandler, + completionHandler, + Names.WRITE + ); final IngestStats afterFirstRequestStats = ingestService.stats(); assertThat(afterFirstRequestStats.pipelineStats().size(), equalTo(2)); @@ -1809,7 +2089,16 @@ public void testStats() throws Exception { assertProcessorStats(0, afterFirstRequestStats, "_id2", 0, 0, 0); indexRequest.setPipeline("_id2"); - ingestService.executeBulkRequest(1, List.of(indexRequest), indexReq -> {}, failureHandler, completionHandler, Names.WRITE); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + failureHandler, + completionHandler, + Names.WRITE + ); final IngestStats afterSecondRequestStats = ingestService.stats(); assertThat(afterSecondRequestStats.pipelineStats().size(), equalTo(2)); // total @@ -1831,7 +2120,16 @@ public void testStats() throws Exception { clusterState = executePut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); indexRequest.setPipeline("_id1"); - ingestService.executeBulkRequest(1, List.of(indexRequest), indexReq -> {}, failureHandler, completionHandler, Names.WRITE); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + failureHandler, + completionHandler, + Names.WRITE + ); final IngestStats afterThirdRequestStats = ingestService.stats(); assertThat(afterThirdRequestStats.pipelineStats().size(), equalTo(2)); // total @@ -1854,7 +2152,16 @@ public void testStats() throws Exception { clusterState = executePut(putRequest, clusterState); ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); indexRequest.setPipeline("_id1"); - ingestService.executeBulkRequest(1, List.of(indexRequest), indexReq -> {}, failureHandler, completionHandler, Names.WRITE); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + failureHandler, + completionHandler, + Names.WRITE + ); final IngestStats afterForthRequestStats = ingestService.stats(); assertThat(afterForthRequestStats.pipelineStats().size(), equalTo(2)); // total @@ -1946,6 +2253,8 @@ public String getDescription() { bulkRequest.numberOfActions(), bulkRequest.requests(), dropHandler, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), failureHandler, completionHandler, Names.WRITE @@ -2030,7 +2339,16 @@ public void testCBORParsing() throws Exception { .setPipeline("_id") .setFinalPipeline("_none"); - ingestService.executeBulkRequest(1, List.of(indexRequest), indexReq -> {}, (integer, e) -> {}, (thread, e) -> {}, Names.WRITE); + ingestService.executeBulkRequest( + 1, + List.of(indexRequest), + indexReq -> 
{}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + (integer, e) -> {}, + (thread, e) -> {}, + Names.WRITE + ); } assertThat(reference.get(), is(instanceOf(byte[].class))); @@ -2101,7 +2419,16 @@ public void testSetsRawTimestamp() { bulkRequest.add(indexRequest6); bulkRequest.add(indexRequest7); bulkRequest.add(indexRequest8); - ingestService.executeBulkRequest(8, bulkRequest.requests(), indexReq -> {}, (integer, e) -> {}, (thread, e) -> {}, Names.WRITE); + ingestService.executeBulkRequest( + 8, + bulkRequest.requests(), + indexReq -> {}, + (s) -> false, + (slot, targetIndex, e) -> fail("Should not be redirecting failures"), + (integer, e) -> {}, + (thread, e) -> {}, + Names.WRITE + ); assertThat(indexRequest1.getRawTimestamp(), nullValue()); assertThat(indexRequest2.getRawTimestamp(), nullValue()); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index d0b30bff92f3e..3a47e0885f2d2 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -124,7 +124,20 @@ public static DataStream newInstance( @Nullable DataStreamLifecycle lifecycle, List failureStores ) { - return new DataStream(name, indices, generation, metadata, false, replicated, false, false, null, lifecycle, false, failureStores); + return new DataStream( + name, + indices, + generation, + metadata, + false, + replicated, + false, + false, + null, + lifecycle, + failureStores.size() > 0, + failureStores + ); } public static String getLegacyDefaultBackingIndexName( @@ -169,6 +182,25 @@ public static IndexMetadata.Builder createBackingIndex(String dataStreamName, in .numberOfReplicas(NUMBER_OF_REPLICAS); } + public static IndexMetadata.Builder createFirstFailureStore(String dataStreamName) { + return createFailureStore(dataStreamName, 1, System.currentTimeMillis()); + } + + public static IndexMetadata.Builder createFirstFailureStore(String dataStreamName, long epochMillis) { + return createFailureStore(dataStreamName, 1, epochMillis); + } + + public static IndexMetadata.Builder createFailureStore(String dataStreamName, int generation) { + return createFailureStore(dataStreamName, generation, System.currentTimeMillis()); + } + + public static IndexMetadata.Builder createFailureStore(String dataStreamName, int generation, long epochMillis) { + return IndexMetadata.builder(DataStream.getDefaultFailureStoreName(dataStreamName, generation, epochMillis)) + .settings(SETTINGS) + .numberOfShards(NUMBER_OF_SHARDS) + .numberOfReplicas(NUMBER_OF_REPLICAS); + } + public static IndexMetadata.Builder getIndexMetadataBuilderForIndex(Index index) { return IndexMetadata.builder(index.getName()) .settings(Settings.builder().put(SETTINGS.build()).put(SETTING_INDEX_UUID, index.getUUID())) From 957419c1643ebe6985eed3c8399fc34793fde325 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 5 Feb 2024 14:46:34 -0500 Subject: [PATCH 033/106] [ML] Setting the request service queue capacity and allow it to be adjusted (#105037) * Bounding queue capacity and allowing it to be adjusted * Adding some deadlock tests * Adding some more tests for the request executor and queue logic * Adding debug message * Retaining overflow items after capacity change * Addressing feedback --- 
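Editor's note: to make the capacity-adjustment behavior concrete, here is a small usage sketch
(not part of the patch) of the AdjustableCapacityBlockingQueue added below. It assumes
LinkedBlockingQueue as the backing implementation supplied through the QueueCreator; the
retained-overflow behavior follows from setCapacity() and take() as defined in this commit.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

import org.elasticsearch.xpack.inference.common.AdjustableCapacityBlockingQueue;

public class AdjustableQueueDemo {
    public static void main(String[] args) throws InterruptedException {
        // The QueueCreator decides which BlockingQueue implementation backs the adjustable queue.
        AdjustableCapacityBlockingQueue.QueueCreator<String> creator = new AdjustableCapacityBlockingQueue.QueueCreator<>() {
            @Override
            public BlockingQueue<String> create(int capacity) {
                return new LinkedBlockingQueue<>(capacity);
            }

            @Override
            public BlockingQueue<String> create() {
                return new LinkedBlockingQueue<>();
            }
        };

        AdjustableCapacityBlockingQueue<String> queue = new AdjustableCapacityBlockingQueue<>(creator, 3);
        queue.offer("a");
        queue.offer("b");
        queue.offer("c");

        // Shrinking the capacity moves the items that no longer fit ("a" and "b") into the
        // internal prioritized reading queue rather than discarding them.
        queue.setCapacity(1);
        System.out.println(queue.size()); // 3 -- size may temporarily exceed the new capacity

        // Overflowed items are served first, preserving the original order.
        System.out.println(queue.take()); // a
        System.out.println(queue.take()); // b
        System.out.println(queue.take()); // c
    }
}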
.../xpack/inference/InferencePlugin.java | 4 +- .../AdjustableCapacityBlockingQueue.java | 184 ++++++++ .../external/http/RequestExecutor.java | 29 ++ .../http/sender/HttpRequestSenderFactory.java | 14 +- .../external/http/sender/HttpTask.java | 16 - .../{ShutdownTask.java => NoopTask.java} | 8 +- ...rvice.java => RequestExecutorService.java} | 215 +++++---- .../RequestExecutorServiceSettings.java | 65 +++ .../external/http/sender/RequestTask.java | 3 +- .../elasticsearch/xpack/inference/Utils.java | 4 +- .../AdjustableCapacityBlockingQueueTests.java | 249 +++++++++++ .../HttpRequestExecutorServiceTests.java | 264 ----------- .../RequestExecutorServiceSettingsTests.java | 33 ++ .../sender/RequestExecutorServiceTests.java | 423 ++++++++++++++++++ 14 files changed, 1108 insertions(+), 403 deletions(-) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueue.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpTask.java rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/{ShutdownTask.java => NoopTask.java} (78%) rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/{HttpRequestExecutorService.java => RequestExecutorService.java} (62%) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettings.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueueTests.java delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettingsTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 4e44929e7ba9b..905a92e899784 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -51,6 +51,7 @@ import org.elasticsearch.xpack.inference.external.http.HttpSettings; import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.external.http.sender.RequestExecutorServiceSettings; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.elasticsearch.xpack.inference.registry.ModelRegistryImpl; import org.elasticsearch.xpack.inference.rest.RestDeleteInferenceModelAction; @@ -223,7 +224,8 @@ public List> getSettings() { HttpRequestSenderFactory.HttpRequestSender.getSettings(), ThrottlerManager.getSettings(), RetrySettings.getSettingsDefinitions(), - Truncator.getSettings() + Truncator.getSettings(), + RequestExecutorServiceSettings.getSettingsDefinitions() 
).flatMap(Collection::stream).collect(Collectors.toList()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueue.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueue.java new file mode 100644 index 0000000000000..e73151b44a3e4 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueue.java @@ -0,0 +1,184 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.core.Nullable; + +import java.util.Collection; +import java.util.Objects; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.locks.ReentrantReadWriteLock; + +/** + * Provides a limited functionality queue that can have its capacity adjusted. + * @param the items to store in the queue + */ +public class AdjustableCapacityBlockingQueue { + + private BlockingQueue currentQueue; + /** + * When the capacity of the {@link AdjustableCapacityBlockingQueue#currentQueue} changes, any items that can't fit in the new queue + * will be placed in this secondary queue (the items from the front of the queue first). Then when an operation occurs to remove an + * item from the queue we'll attempt to grab it from this secondary queue first. That way we guarantee that items that were in the + * old queue will be read first. + */ + private final BlockingQueue prioritizedReadingQueue; + private final QueueCreator queueCreator; + private final ReentrantReadWriteLock lock; + + /** + * Constructs the adjustable capacity queue + * @param queueCreator a {@link QueueCreator} object for handling how to create the {@link BlockingQueue} + * @param initialCapacity the initial capacity of the queue, if null the queue will be unbounded + */ + public AdjustableCapacityBlockingQueue(QueueCreator queueCreator, @Nullable Integer initialCapacity) { + this.queueCreator = Objects.requireNonNull(queueCreator); + currentQueue = createCurrentQueue(queueCreator, initialCapacity); + lock = new ReentrantReadWriteLock(); + prioritizedReadingQueue = queueCreator.create(); + } + + private static BlockingQueue createCurrentQueue(QueueCreator queueCreator, @Nullable Integer initialCapacity) { + if (initialCapacity == null) { + return queueCreator.create(); + } + + return queueCreator.create(initialCapacity); + } + + /** + * Sets the capacity of the queue. If the new capacity is smaller than the current number of elements in the queue, the + * elements that exceed the new capacity are retained. In this situation the {@link AdjustableCapacityBlockingQueue#size()} method + * could return a value greater than the specified capacity. + *
+ * This is potentially an expensive operation because a new internal queue is instantiated. + * @param newCapacity the new capacity for the queue + */ + public void setCapacity(int newCapacity) { + final ReentrantReadWriteLock.WriteLock writeLock = lock.writeLock(); + + writeLock.lock(); + try { + BlockingQueue newQueue = queueCreator.create(newCapacity); + // Drain the first items from the queue, so they will get read first. + // Only drain the amount that wouldn't fit in the new queue + // If the new capacity is larger than the current queue size then we don't need to drain any + // they will all fit within the newly created queue. In this situation the queue size - capacity + // would result in a negative value which is ignored + if (currentQueue.size() > newCapacity) { + currentQueue.drainTo(prioritizedReadingQueue, currentQueue.size() - newCapacity); + } + currentQueue.drainTo(newQueue, newCapacity); + currentQueue = newQueue; + } finally { + writeLock.unlock(); + } + } + + public boolean offer(E item) { + final ReentrantReadWriteLock.ReadLock readLock = lock.readLock(); + + readLock.lock(); + try { + return currentQueue.offer(item); + } finally { + readLock.unlock(); + } + } + + public int drainTo(Collection c) { + return drainTo(c, Integer.MAX_VALUE); + } + + public int drainTo(Collection c, int maxElements) { + final ReentrantReadWriteLock.ReadLock readLock = lock.readLock(); + + readLock.lock(); + try { + var numberOfDrainedOldItems = prioritizedReadingQueue.drainTo(c, maxElements); + var numberOfDrainedCurrentItems = currentQueue.drainTo(c, maxElements - numberOfDrainedOldItems); + + return numberOfDrainedCurrentItems + numberOfDrainedOldItems; + } finally { + readLock.unlock(); + } + } + + public E poll(long timeout, TimeUnit timeUnit) throws InterruptedException { + final ReentrantReadWriteLock.ReadLock readLock = lock.readLock(); + + readLock.lockInterruptibly(); + try { + // no new items should be added to the old queue, so we shouldn't need to wait on it + var oldItem = prioritizedReadingQueue.poll(); + + if (oldItem != null) { + return oldItem; + } + + return currentQueue.poll(timeout, timeUnit); + } finally { + readLock.unlock(); + } + } + + public E take() throws InterruptedException { + final ReentrantReadWriteLock.ReadLock readLock = lock.readLock(); + + readLock.lockInterruptibly(); + try { + var oldItem = prioritizedReadingQueue.poll(); + + if (oldItem != null) { + return oldItem; + } + + return currentQueue.take(); + } finally { + readLock.unlock(); + } + } + + /** + * Returns the number of elements stored in the queue. If the capacity was recently changed, the value returned could be + * greater than the capacity. This occurs when the capacity was reduced and there were more elements in the queue than the + * new capacity. + * @return the number of elements in the queue. + */ + public int size() { + return currentQueue.size() + prioritizedReadingQueue.size(); + } + + /** + * The number of additional elements that his queue can accept without blocking. + */ + public int remainingCapacity() { + return currentQueue.remainingCapacity(); + } + + /** + * Provides a contract for creating a {@link BlockingQueue} + * @param items to store in the queue + */ + public interface QueueCreator { + + /** + * Creates a new {@link BlockingQueue} with the specified capacity. 
+    /**
+     * Provides a contract for creating a {@link BlockingQueue}
+     * @param <E> the items to store in the queue
+     */
+    public interface QueueCreator<E> {
+
+        /**
+         * Creates a new {@link BlockingQueue} with the specified capacity.
+         * @param capacity the number of items that can be stored in the queue
+         * @return a new {@link BlockingQueue}
+         */
+        BlockingQueue<E> create(int capacity);
+
+        /**
+         * Creates a new {@link BlockingQueue} with an unbounded capacity.
+         * @return a new {@link BlockingQueue}
+         */
+        BlockingQueue<E> create();
+    }
+}

diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java
new file mode 100644
index 0000000000000..5c8fa62ba88f9
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/RequestExecutor.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.external.http;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.xpack.inference.external.request.HttpRequest;
+
+import java.util.concurrent.TimeUnit;
+
+public interface RequestExecutor {
+    void start();
+
+    void shutdown();
+
+    boolean isShutdown();
+
+    boolean isTerminated();
+
+    boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException;
+
+    void execute(HttpRequest request, @Nullable TimeValue timeout, ActionListener<HttpResult> listener);
+}

diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java
index edceb8324fbc9..c773f57933415 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactory.java
@@ -78,7 +78,7 @@ public static final class HttpRequestSender implements Sender {
         private final ThreadPool threadPool;
         private final HttpClientManager manager;
-        private final HttpRequestExecutorService service;
+        private final RequestExecutorService service;
         private final AtomicBoolean started = new AtomicBoolean(false);
         private volatile TimeValue maxRequestTimeout;
         private final CountDownLatch startCompleted = new CountDownLatch(2);
@@ -92,7 +92,13 @@ private HttpRequestSender(
         ) {
             this.threadPool = Objects.requireNonNull(threadPool);
             this.manager = Objects.requireNonNull(httpClientManager);
-            service = new HttpRequestExecutorService(serviceName, manager.getHttpClient(), threadPool, startCompleted);
+            service = new RequestExecutorService(
+                serviceName,
+                manager.getHttpClient(),
+                threadPool,
+                startCompleted,
+                new RequestExecutorServiceSettings(settings, clusterService)
+            );
             this.maxRequestTimeout = MAX_REQUEST_TIMEOUT.get(settings);
 
             addSettingsUpdateConsumers(clusterService);
@@ -138,7 +144,7 @@ public void close() throws IOException {
         public void send(HttpRequest request, @Nullable TimeValue timeout, ActionListener<HttpResult> listener) {
             assert started.get() : "call start() before sending a request";
             waitForStartToComplete();
-            service.send(request, timeout, listener);
+            service.execute(request, timeout, listener);
         }
 
         private void waitForStartToComplete() {
@@
-159,7 +165,7 @@ private void waitForStartToComplete() { public void send(HttpRequest request, ActionListener listener) { assert started.get() : "call start() before sending a request"; waitForStartToComplete(); - service.send(request, maxRequestTimeout, listener); + service.execute(request, maxRequestTimeout, listener); } public static List> getSettings() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpTask.java deleted file mode 100644 index 6881d75524bda..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpTask.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.http.sender; - -import org.elasticsearch.common.util.concurrent.AbstractRunnable; - -abstract class HttpTask extends AbstractRunnable { - public boolean shouldShutdown() { - return false; - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ShutdownTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java similarity index 78% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ShutdownTask.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java index 9ec2edf514e80..c5e533eb7d8fe 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ShutdownTask.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/NoopTask.java @@ -7,11 +7,9 @@ package org.elasticsearch.xpack.inference.external.http.sender; -class ShutdownTask extends HttpTask { - @Override - public boolean shouldShutdown() { - return true; - } +import org.elasticsearch.common.util.concurrent.AbstractRunnable; + +class NoopTask extends AbstractRunnable { @Override public void onFailure(Exception e) {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java similarity index 62% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java index 84aac7cde6bf5..47b4d49b8f46e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java @@ -11,29 +11,27 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import 
org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.common.AdjustableCapacityBlockingQueue; import org.elasticsearch.xpack.inference.external.http.HttpClient; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.RequestExecutor; import org.elasticsearch.xpack.inference.external.request.HttpRequest; import java.util.ArrayList; -import java.util.Collection; import java.util.List; import java.util.Objects; import java.util.concurrent.BlockingQueue; -import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; import static org.elasticsearch.core.Strings.format; @@ -49,51 +47,103 @@ * attempting to execute a task (aka waiting for the connection manager to lease a connection). See * {@link org.apache.http.client.config.RequestConfig.Builder#setConnectionRequestTimeout} for more info. */ -class HttpRequestExecutorService implements ExecutorService { - private static final Logger logger = LogManager.getLogger(HttpRequestExecutorService.class); +class RequestExecutorService implements RequestExecutor { + + private static final AdjustableCapacityBlockingQueue.QueueCreator QUEUE_CREATOR = + new AdjustableCapacityBlockingQueue.QueueCreator<>() { + @Override + public BlockingQueue create(int capacity) { + BlockingQueue queue; + if (capacity <= 0) { + queue = create(); + } else { + queue = new LinkedBlockingQueue<>(capacity); + } + + return queue; + } + + @Override + public BlockingQueue create() { + return new LinkedBlockingQueue<>(); + } + }; + private static final Logger logger = LogManager.getLogger(RequestExecutorService.class); private final String serviceName; - private final BlockingQueue queue; + private final AdjustableCapacityBlockingQueue queue; private final AtomicBoolean running = new AtomicBoolean(true); private final CountDownLatch terminationLatch = new CountDownLatch(1); private final HttpClientContext httpContext; private final HttpClient httpClient; private final ThreadPool threadPool; private final CountDownLatch startupLatch; + private final BlockingQueue controlQueue = new LinkedBlockingQueue<>(); - @SuppressForbidden(reason = "wraps a queue and handles errors appropriately") - HttpRequestExecutorService(String serviceName, HttpClient httpClient, ThreadPool threadPool, @Nullable CountDownLatch startupLatch) { - this(serviceName, httpClient, threadPool, new LinkedBlockingQueue<>(), startupLatch); - } - - @SuppressForbidden(reason = "wraps a queue and handles errors appropriately") - HttpRequestExecutorService( + RequestExecutorService( String serviceName, HttpClient httpClient, ThreadPool threadPool, - int capacity, - @Nullable CountDownLatch startupLatch + @Nullable CountDownLatch startupLatch, + RequestExecutorServiceSettings settings ) { - this(serviceName, httpClient, threadPool, new LinkedBlockingQueue<>(capacity), startupLatch); + this(serviceName, httpClient, 
threadPool, QUEUE_CREATOR, startupLatch, settings); + } + + private static BlockingQueue buildQueue(int capacity) { + BlockingQueue queue; + if (capacity <= 0) { + queue = new LinkedBlockingQueue<>(); + } else { + queue = new LinkedBlockingQueue<>(capacity); + } + + return queue; } /** * This constructor should only be used directly for testing. */ - @SuppressForbidden(reason = "wraps a queue and handles errors appropriately") - HttpRequestExecutorService( + RequestExecutorService( String serviceName, HttpClient httpClient, ThreadPool threadPool, - BlockingQueue queue, - @Nullable CountDownLatch startupLatch + AdjustableCapacityBlockingQueue.QueueCreator createQueue, + @Nullable CountDownLatch startupLatch, + RequestExecutorServiceSettings settings ) { this.serviceName = Objects.requireNonNull(serviceName); this.httpClient = Objects.requireNonNull(httpClient); this.threadPool = Objects.requireNonNull(threadPool); this.httpContext = HttpClientContext.create(); - this.queue = queue; + this.queue = new AdjustableCapacityBlockingQueue<>(createQueue, settings.getQueueCapacity()); this.startupLatch = startupLatch; + + Objects.requireNonNull(settings); + settings.registerQueueCapacityCallback(this::onCapacityChange); + } + + private void onCapacityChange(int capacity) { + logger.debug(() -> Strings.format("Setting queue capacity to [%s]", capacity)); + + var enqueuedCapacityCommand = controlQueue.offer(() -> updateCapacity(capacity)); + if (enqueuedCapacityCommand == false) { + logger.warn("Failed to change request batching service queue capacity. Control queue was full, please try again later."); + } else { + // ensure that the task execution loop wakes up + queue.offer(new NoopTask()); + } + } + + private void updateCapacity(int newCapacity) { + try { + queue.setCapacity(newCapacity); + } catch (Exception e) { + logger.warn( + format("Failed to set the capacity of the task queue to [%s] for request batching service [%s]", newCapacity, serviceName), + e + ); + } } /** @@ -125,13 +175,18 @@ private void signalStartInitiated() { * Protects the task retrieval logic from an unexpected exception. 
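// Sketch of the wake-up handshake used by onCapacityChange above; an illustrative
// restatement of code from this file, not an addition to it. The capacity command goes
// onto the control queue, then a NoopTask is offered so that a thread blocked in
// queue.take() returns and handleTasks() runs the command before executing real work:
//
//     controlQueue.offer(() -> updateCapacity(newCapacity));
//     queue.offer(new NoopTask());   // payload does nothing; it exists only to unblock take()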
* * @throws InterruptedException rethrows the exception if it occurred retrieving a task because the thread is likely attempting to - * shut down + * shut down */ private void handleTasks() throws InterruptedException { try { - HttpTask task = queue.take(); - if (task.shouldShutdown() || running.get() == false) { - running.set(false); + AbstractRunnable task = queue.take(); + + var command = controlQueue.poll(); + if (command != null) { + command.run(); + } + + if (running.get() == false) { logger.debug(() -> format("Http executor service [%s] exiting", serviceName)); } else { executeTask(task); @@ -143,7 +198,7 @@ private void handleTasks() throws InterruptedException { } } - private void executeTask(HttpTask task) { + private void executeTask(AbstractRunnable task) { try { task.run(); } catch (Exception e) { @@ -155,18 +210,16 @@ private synchronized void notifyRequestsOfShutdown() { assert isShutdown() : "Requests should only be notified if the executor is shutting down"; try { - List notExecuted = new ArrayList<>(); + List notExecuted = new ArrayList<>(); queue.drainTo(notExecuted); - for (HttpTask task : notExecuted) { - rejectTask(task); - } + rejectTasks(notExecuted, this::rejectTaskBecauseOfShutdown); } catch (Exception e) { logger.warn(format("Failed to notify tasks of queuing service [%s] shutdown", serviceName)); } } - private void rejectTask(HttpTask task) { + private void rejectTaskBecauseOfShutdown(AbstractRunnable task) { try { task.onRejection( new EsRejectedExecutionException( @@ -181,6 +234,12 @@ private void rejectTask(HttpTask task) { } } + private void rejectTasks(List tasks, Consumer rejectionFunction) { + for (var task : tasks) { + rejectionFunction.accept(task); + } + } + public int queueSize() { return queue.size(); } @@ -189,16 +248,10 @@ public int queueSize() { public void shutdown() { if (running.compareAndSet(true, false)) { // if this fails because the queue is full, that's ok, we just want to ensure that queue.take() returns - queue.offer(new ShutdownTask()); + queue.offer(new NoopTask()); } } - @Override - public List shutdownNow() { - shutdown(); - return new ArrayList<>(queue); - } - @Override public boolean isShutdown() { return running.get() == false; @@ -216,13 +269,14 @@ public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedE /** * Send the request at some point in the future. - * @param request the http request to send - * @param timeout the maximum time to wait for this request to complete (failing or succeeding). Once the time elapses, the - * listener::onFailure is called with a {@link org.elasticsearch.ElasticsearchTimeoutException}. - * If null, then the request will wait forever + * + * @param request the http request to send + * @param timeout the maximum time to wait for this request to complete (failing or succeeding). Once the time elapses, the + * listener::onFailure is called with a {@link org.elasticsearch.ElasticsearchTimeoutException}. + * If null, then the request will wait forever * @param listener an {@link ActionListener} for the response or failure */ - public void send(HttpRequest request, @Nullable TimeValue timeout, ActionListener listener) { + public void execute(HttpRequest request, @Nullable TimeValue timeout, ActionListener listener) { RequestTask task = new RequestTask(request, httpClient, httpContext, timeout, threadPool, listener); if (isShutdown()) { @@ -251,69 +305,8 @@ public void send(HttpRequest request, @Nullable TimeValue timeout, ActionListene } } - /** - * This method is not supported. 
Use {@link #send} instead. - * @param runnable the runnable task - */ - @Override - public void execute(Runnable runnable) { - throw new UnsupportedOperationException("use send instead"); - } - - /** - * This method is not supported. Use {@link #send} instead. - */ - @Override - public Future submit(Callable task) { - throw new UnsupportedOperationException("use send instead"); - } - - /** - * This method is not supported. Use {@link #send} instead. - */ - @Override - public Future submit(Runnable task, T result) { - throw new UnsupportedOperationException("use send instead"); - } - - /** - * This method is not supported. Use {@link #send} instead. - */ - @Override - public Future submit(Runnable task) { - throw new UnsupportedOperationException("use send instead"); - } - - /** - * This method is not supported. Use {@link #send} instead. - */ - @Override - public List> invokeAll(Collection> tasks) throws InterruptedException { - throw new UnsupportedOperationException("use send instead"); - } - - /** - * This method is not supported. Use {@link #send} instead. - */ - @Override - public List> invokeAll(Collection> tasks, long timeout, TimeUnit unit) throws InterruptedException { - throw new UnsupportedOperationException("use send instead"); - } - - /** - * This method is not supported. Use {@link #send} instead. - */ - @Override - public T invokeAny(Collection> tasks) throws InterruptedException, ExecutionException { - throw new UnsupportedOperationException("use send instead"); - } - - /** - * This method is not supported. Use {@link #send} instead. - */ - @Override - public T invokeAny(Collection> tasks, long timeout, TimeUnit unit) throws InterruptedException, - ExecutionException, TimeoutException { - throw new UnsupportedOperationException("use send instead"); + // default for testing + int remainingQueueCapacity() { + return queue.remainingCapacity(); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettings.java new file mode 100644 index 0000000000000..86825035f2d05 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettings.java @@ -0,0 +1,65 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Consumer; + +public class RequestExecutorServiceSettings { + + /** + * The capacity of the internal queue. Zero is considered unlimited. If a positive value is used, the queue will reject entries + * once it is full. 
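// Hypothetical operator-side sketch, not part of this class: because the setting below is
// declared Dynamic, the queue capacity can be changed on a live cluster and reaches the
// executor service through the callback registered further down.
//
//     ClusterUpdateSettingsRequest update = new ClusterUpdateSettingsRequest();
//     update.persistentSettings(
//         Settings.builder().put("xpack.inference.http.request_executor.queue_capacity", 500)
//     );
//     client.admin().cluster().updateSettings(update, listener);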
+     */
+    static final Setting<Integer> TASK_QUEUE_CAPACITY_SETTING = Setting.intSetting(
+        "xpack.inference.http.request_executor.queue_capacity",
+        2000,
+        0,
+        Setting.Property.NodeScope,
+        Setting.Property.Dynamic
+    );
+
+    public static List<Setting<?>> getSettingsDefinitions() {
+        return List.of(TASK_QUEUE_CAPACITY_SETTING);
+    }
+
+    private volatile int queueCapacity;
+    private final List<Consumer<Integer>> queueCapacityCallbacks = new ArrayList<>();
+
+    public RequestExecutorServiceSettings(Settings settings, ClusterService clusterService) {
+        queueCapacity = TASK_QUEUE_CAPACITY_SETTING.get(settings);
+
+        addSettingsUpdateConsumers(clusterService);
+    }
+
+    private void addSettingsUpdateConsumers(ClusterService clusterService) {
+        clusterService.getClusterSettings().addSettingsUpdateConsumer(TASK_QUEUE_CAPACITY_SETTING, this::setQueueCapacity);
+    }
+
+    // default for testing
+    void setQueueCapacity(int queueCapacity) {
+        this.queueCapacity = queueCapacity;
+
+        for (var callback : queueCapacityCallbacks) {
+            callback.accept(queueCapacity);
+        }
+    }
+
+    void registerQueueCapacityCallback(Consumer<Integer> onChangeCapacityCallback) {
+        queueCapacityCallbacks.add(onChangeCapacityCallback);
+    }
+
+    int getQueueCapacity() {
+        return queueCapacity;
+    }
+}

diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java
index 2eefff791b709..cc65d16af652c 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTask.java
@@ -13,6 +13,7 @@
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchTimeoutException;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.threadpool.Scheduler;
@@ -27,7 +28,7 @@
 import static org.elasticsearch.core.Strings.format;
 import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME;
 
-class RequestTask extends HttpTask {
+class RequestTask extends AbstractRunnable {
     private static final Logger logger = LogManager.getLogger(RequestTask.class);
     private static final Scheduler.Cancellable NOOP_TIMEOUT_HANDLER = createDefaultHandler();

diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java
index 356caecf8fadb..5b7ffb3c8153e 100644
--- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java
+++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java
@@ -17,6 +17,7 @@
 import org.elasticsearch.xpack.inference.external.http.HttpSettings;
 import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings;
 import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory;
+import org.elasticsearch.xpack.inference.external.http.sender.RequestExecutorServiceSettings;
 import org.elasticsearch.xpack.inference.logging.ThrottlerManager;
 
 import java.util.Collection;
@@ -41,7 +42,8 @@ public static ClusterService mockClusterService(Settings settings) {
             HttpRequestSenderFactory.HttpRequestSender.getSettings(),
             ThrottlerManager.getSettings(),
RetrySettings.getSettingsDefinitions(), - Truncator.getSettings() + Truncator.getSettings(), + RequestExecutorServiceSettings.getSettingsDefinitions() ).flatMap(Collection::stream).collect(Collectors.toSet()); var cSettings = new ClusterSettings(settings, registeredSettings); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueueTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueueTests.java new file mode 100644 index 0000000000000..09cd065ce3cd0 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/AdjustableCapacityBlockingQueueTests.java @@ -0,0 +1,249 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.hamcrest.Matchers.is; + +public class AdjustableCapacityBlockingQueueTests extends ESTestCase { + private static final AdjustableCapacityBlockingQueue.QueueCreator QUEUE_CREATOR = + new AdjustableCapacityBlockingQueue.QueueCreator<>() { + @Override + public BlockingQueue create(int capacity) { + return new LinkedBlockingQueue<>(capacity); + } + + @Override + public BlockingQueue create() { + return new LinkedBlockingQueue<>(); + } + }; + + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private ThreadPool threadPool; + + @Before + public void init() { + threadPool = createThreadPool(inferenceUtilityPool()); + } + + @After + public void shutdown() { + terminate(threadPool); + } + + public void testSetCapacity_ChangesTheQueueCapacityToTwo() { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 1); + assertThat(queue.remainingCapacity(), is(1)); + + queue.setCapacity(2); + assertThat(queue.remainingCapacity(), is(2)); + } + + public void testInitiallySetsCapacityToUnbounded_WhenCapacityIsNull() { + assertThat(new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, null).remainingCapacity(), is(Integer.MAX_VALUE)); + } + + public void testSetCapacity_RemainingCapacityIsZero_WhenReducingTheQueueCapacityToOne_WhenItemsExistInTheQueue() + throws InterruptedException { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 2); + assertThat(queue.remainingCapacity(), is(2)); + assertThat(queue.size(), is(0)); + + queue.offer(0); + queue.offer(1); + assertThat(queue.remainingCapacity(), is(0)); + + queue.setCapacity(1); + assertThat(queue.remainingCapacity(), is(0)); + assertThat(queue.size(), is(2)); + assertThat(queue.take(), is(0)); + assertThat(queue.take(), is(1)); + } + + public void 
testSetCapacity_RetainsOrdering_WhenReturningItems_AfterDecreasingCapacity() { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 3); + assertThat(queue.size(), is(0)); + + queue.offer(0); + queue.offer(1); + queue.offer(2); + assertThat(queue.size(), is(3)); + + queue.setCapacity(2); + + var entriesList = new ArrayList(); + assertThat(queue.drainTo(entriesList), is(3)); + + assertThat(queue.size(), is(0)); + assertThat(entriesList, is(List.of(0, 1, 2))); + } + + public void testSetCapacity_RetainsOrdering_WhenReturningItems_AfterIncreasingCapacity() { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 2); + assertThat(queue.size(), is(0)); + + queue.offer(0); + queue.offer(1); + assertThat(queue.size(), is(2)); + + queue.setCapacity(3); + + queue.offer(2); + + var entriesList = new ArrayList(); + assertThat(queue.drainTo(entriesList), is(3)); + + assertThat(queue.size(), is(0)); + assertThat(entriesList, is(List.of(0, 1, 2))); + } + + public void testSetCapacity_RetainsOrdering_WhenReturningItems_AfterDecreasingCapacity_UsingTake() throws InterruptedException { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 3); + assertThat(queue.size(), is(0)); + + queue.offer(0); + queue.offer(1); + queue.offer(2); + assertThat(queue.size(), is(3)); + + queue.setCapacity(2); + + assertThat(queue.take(), is(0)); + assertThat(queue.take(), is(1)); + assertThat(queue.take(), is(2)); + + assertThat(queue.size(), is(0)); + } + + public void testSetCapacity_RetainsOrdering_WhenReturningItems_AfterIncreasingCapacity_UsingTake() throws InterruptedException { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 2); + assertThat(queue.size(), is(0)); + + queue.offer(0); + queue.offer(1); + assertThat(queue.size(), is(2)); + + queue.setCapacity(3); + + queue.offer(2); + + assertThat(queue.take(), is(0)); + assertThat(queue.take(), is(1)); + assertThat(queue.take(), is(2)); + + assertThat(queue.size(), is(0)); + } + + public void testOffer_AddsItemToTheQueue() { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 1); + assertThat(queue.size(), is(0)); + + queue.offer(0); + assertThat(queue.size(), is(1)); + } + + public void testDrainTo_MovesAllItemsFromQueueToList() { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 2); + assertThat(queue.size(), is(0)); + + queue.offer(0); + queue.offer(1); + assertThat(queue.size(), is(2)); + + var entriesList = new ArrayList(); + queue.drainTo(entriesList); + + assertThat(queue.size(), is(0)); + assertThat(entriesList, is(List.of(0, 1))); + } + + public void testDrainTo_MovesOnlyOneItemFromQueueToList() { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 2); + assertThat(queue.size(), is(0)); + + queue.offer(0); + queue.offer(1); + assertThat(queue.size(), is(2)); + + var entriesList = new ArrayList(); + assertThat(queue.drainTo(entriesList, 1), is(1)); + + assertThat(queue.size(), is(1)); + assertThat(entriesList, is(List.of(0))); + } + + public void testPoll_RemovesAnItemFromTheQueue_AfterItBecomesAvailable() throws ExecutionException, InterruptedException, + TimeoutException { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 1); + assertThat(queue.size(), is(0)); + + var waitForOfferCallLatch = new CountDownLatch(1); + + Future pollFuture = threadPool.generic().submit(() -> { + try { + waitForOfferCallLatch.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); + return queue.poll(TIMEOUT.getSeconds(), TimeUnit.SECONDS); + } catch (Exception e) { + 
fail(Strings.format("Failed to polling queue: %s", e)); + } + + return null; + }); + + queue.offer(0); + assertThat(queue.size(), is(1)); + waitForOfferCallLatch.countDown(); + + assertThat(pollFuture.get(TIMEOUT.getSeconds(), TimeUnit.SECONDS), is(0)); + + assertThat(queue.size(), is(0)); + } + + public void testTake_RemovesItemFromQueue() throws InterruptedException { + var queue = new AdjustableCapacityBlockingQueue<>(QUEUE_CREATOR, 1); + assertThat(queue.size(), is(0)); + + queue.offer(0); + assertThat(queue.size(), is(1)); + + assertThat(queue.take(), is(0)); + assertThat(queue.size(), is(0)); + } + + public static AdjustableCapacityBlockingQueue.QueueCreator mockQueueCreator(BlockingQueue backingQueue) { + return new AdjustableCapacityBlockingQueue.QueueCreator<>() { + @Override + public BlockingQueue create(int capacity) { + return backingQueue; + } + + @Override + public BlockingQueue create() { + return backingQueue; + } + }; + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java deleted file mode 100644 index f25312260bfd0..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java +++ /dev/null @@ -1,264 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.external.http.sender; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchTimeoutException; -import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xpack.inference.external.http.HttpClient; -import org.elasticsearch.xpack.inference.external.http.HttpResult; -import org.elasticsearch.xpack.inference.external.request.HttpRequestTests; -import org.junit.After; -import org.junit.Before; - -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Future; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.TimeUnit; - -import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; -import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createHttpPost; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -public class HttpRequestExecutorServiceTests extends ESTestCase { - private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); - private ThreadPool threadPool; - - @Before - public void init() { - threadPool = createThreadPool(inferenceUtilityPool()); - } - - 
@After - public void shutdown() { - terminate(threadPool); - } - - public void testQueueSize_IsEmpty() { - var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool, null); - - assertThat(service.queueSize(), is(0)); - } - - public void testQueueSize_IsOne() { - var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool, null); - service.send(HttpRequestTests.createMock("inferenceEntityId"), null, new PlainActionFuture<>()); - - assertThat(service.queueSize(), is(1)); - } - - public void testExecute_ThrowsUnsupported() { - var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool, null); - var noopTask = mock(RequestTask.class); - - var thrownException = expectThrows(UnsupportedOperationException.class, () -> service.execute(noopTask)); - assertThat(thrownException.getMessage(), is("use send instead")); - } - - public void testIsTerminated_IsFalse() { - var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool, null); - - assertFalse(service.isTerminated()); - } - - public void testIsTerminated_IsTrue() throws InterruptedException { - var latch = new CountDownLatch(1); - var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool, latch); - - service.shutdown(); - service.start(); - latch.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); - - assertTrue(service.isTerminated()); - } - - public void testIsTerminated_AfterStopFromSeparateThread() throws Exception { - var waitToShutdown = new CountDownLatch(1); - - var mockHttpClient = mock(HttpClient.class); - doAnswer(invocation -> { - waitToShutdown.countDown(); - return Void.TYPE; - }).when(mockHttpClient).send(any(), any(), any()); - - var service = new HttpRequestExecutorService(getTestName(), mockHttpClient, threadPool, null); - - Future executorTermination = threadPool.generic().submit(() -> { - try { - // wait for a task to be added to be executed before beginning shutdown - waitToShutdown.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); - service.shutdown(); - service.awaitTermination(TIMEOUT.getSeconds(), TimeUnit.SECONDS); - } catch (Exception e) { - fail(Strings.format("Failed to shutdown executor: %s", e)); - } - }); - - PlainActionFuture listener = new PlainActionFuture<>(); - service.send(HttpRequestTests.createMock("inferenceEntityId"), null, listener); - - service.start(); - - try { - executorTermination.get(1, TimeUnit.SECONDS); - } catch (Exception e) { - fail(Strings.format("Executor finished before it was signaled to shutdown: %s", e)); - } - - assertTrue(service.isShutdown()); - assertTrue(service.isTerminated()); - } - - public void testSend_AfterShutdown_Throws() { - var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, null); - - service.shutdown(); - - var listener = new PlainActionFuture(); - service.send(HttpRequestTests.createMock("inferenceEntityId"), null, listener); - - var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); - - assertThat( - thrownException.getMessage(), - is("Failed to enqueue task because the http executor service [test_service] has already shutdown") - ); - } - - public void testSend_Throws_WhenQueueIsFull() { - var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, 1, null); - - service.send(HttpRequestTests.createMock("inferenceEntityId"), null, new PlainActionFuture<>()); - var 
listener = new PlainActionFuture(); - service.send(HttpRequestTests.createMock("inferenceEntityId"), null, listener); - - var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); - - assertThat( - thrownException.getMessage(), - is("Failed to execute task because the http executor service [test_service] queue is full") - ); - } - - public void testTaskThrowsError_CallsOnFailure() throws Exception { - var httpClient = mock(HttpClient.class); - - var service = new HttpRequestExecutorService(getTestName(), httpClient, threadPool, null); - - doAnswer(invocation -> { - service.shutdown(); - throw new IllegalArgumentException("failed"); - }).when(httpClient).send(any(), any(), any()); - - PlainActionFuture listener = new PlainActionFuture<>(); - - var request = createHttpPost(0, "a", "b"); - service.send(request, null, listener); - service.start(); - - var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); - assertThat( - thrownException.getMessage(), - is(format("Failed to send request from inference entity id [%s]", request.inferenceEntityId())) - ); - assertThat(thrownException.getCause(), instanceOf(IllegalArgumentException.class)); - assertTrue(service.isTerminated()); - } - - public void testShutdown_AllowsMultipleCalls() { - var service = new HttpRequestExecutorService(getTestName(), mock(HttpClient.class), threadPool, null); - - service.shutdown(); - service.shutdown(); - service.shutdownNow(); - service.start(); - - assertTrue(service.isTerminated()); - assertTrue(service.isShutdown()); - } - - public void testSend_CallsOnFailure_WhenRequestTimesOut() { - var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, null); - - var listener = new PlainActionFuture(); - service.send(HttpRequestTests.createMock("inferenceEntityId"), TimeValue.timeValueNanos(1), listener); - - var thrownException = expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); - - assertThat( - thrownException.getMessage(), - is(format("Request timed out waiting to be executed after [%s]", TimeValue.timeValueNanos(1))) - ); - } - - public void testSend_NotifiesTasksOfShutdown() { - var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, null); - - var listener = new PlainActionFuture(); - service.send(HttpRequestTests.createMock("inferenceEntityId"), null, listener); - service.shutdown(); - service.start(); - - var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); - - assertThat( - thrownException.getMessage(), - is("Failed to send request, queue service [test_service] has shutdown prior to executing request") - ); - assertTrue(thrownException.isExecutorShutdown()); - assertTrue(service.isTerminated()); - } - - public void testQueueTake_Throwing_DoesNotCauseServiceToTerminate() throws InterruptedException { - @SuppressWarnings("unchecked") - BlockingQueue queue = mock(LinkedBlockingQueue.class); - when(queue.take()).thenThrow(new ElasticsearchException("failed")).thenReturn(new ShutdownTask()); - - var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, queue, null); - - service.start(); - - assertTrue(service.isTerminated()); - verify(queue, times(2)).take(); - } - - public void testQueueTake_ThrowingInterruptedException_TerminatesService() throws Exception { - @SuppressWarnings("unchecked") - BlockingQueue queue = 
mock(LinkedBlockingQueue.class); - when(queue.take()).thenThrow(new InterruptedException("failed")); - - var service = new HttpRequestExecutorService("test_service", mock(HttpClient.class), threadPool, queue, null); - - Future executorTermination = threadPool.generic().submit(() -> { - try { - service.start(); - } catch (Exception e) { - fail(Strings.format("Failed to shutdown executor: %s", e)); - } - }); - - executorTermination.get(TIMEOUT.millis(), TimeUnit.MILLISECONDS); - - assertTrue(service.isTerminated()); - verify(queue, times(1)).take(); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettingsTests.java new file mode 100644 index 0000000000000..c0c0bdd49f617 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceSettingsTests.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; + +import static org.elasticsearch.xpack.inference.Utils.mockClusterService; + +public class RequestExecutorServiceSettingsTests { + public static RequestExecutorServiceSettings createRequestExecutorServiceSettingsEmpty() { + return createRequestExecutorServiceSettings(Settings.EMPTY); + } + + public static RequestExecutorServiceSettings createRequestExecutorServiceSettings(@Nullable Integer queueCapacity) { + var settingsBuilder = Settings.builder(); + + if (queueCapacity != null) { + settingsBuilder.put(RequestExecutorServiceSettings.TASK_QUEUE_CAPACITY_SETTING.getKey(), queueCapacity); + } + + return createRequestExecutorServiceSettings(settingsBuilder.build()); + } + + public static RequestExecutorServiceSettings createRequestExecutorServiceSettings(Settings settings) { + return new RequestExecutorServiceSettings(settings, mockClusterService(settings)); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java new file mode 100644 index 0000000000000..efbe8d62d7207 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java @@ -0,0 +1,423 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.external.http.sender; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.external.http.HttpClient; +import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.HttpRequestTests; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.common.AdjustableCapacityBlockingQueueTests.mockQueueCreator; +import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createHttpPost; +import static org.elasticsearch.xpack.inference.external.http.sender.RequestExecutorServiceSettingsTests.createRequestExecutorServiceSettings; +import static org.elasticsearch.xpack.inference.external.http.sender.RequestExecutorServiceSettingsTests.createRequestExecutorServiceSettingsEmpty; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class RequestExecutorServiceTests extends ESTestCase { + private static final TimeValue TIMEOUT = new TimeValue(30, TimeUnit.SECONDS); + private ThreadPool threadPool; + + @Before + public void init() { + threadPool = createThreadPool(inferenceUtilityPool()); + } + + @After + public void shutdown() { + terminate(threadPool); + } + + public void testQueueSize_IsEmpty() { + var service = createRequestExecutorServiceWithMocks(); + + assertThat(service.queueSize(), is(0)); + } + + public void testQueueSize_IsOne() { + var service = createRequestExecutorServiceWithMocks(); + service.execute(HttpRequestTests.createMock("inferenceEntityId"), null, new PlainActionFuture<>()); + + assertThat(service.queueSize(), is(1)); + } + + public void testIsTerminated_IsFalse() { + var service = createRequestExecutorServiceWithMocks(); + + assertFalse(service.isTerminated()); + } + + public void testIsTerminated_IsTrue() throws InterruptedException { + var latch = new CountDownLatch(1); + var service = createRequestExecutorService(null, latch); + + service.shutdown(); + service.start(); + latch.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); + + assertTrue(service.isTerminated()); + } + + public void testIsTerminated_AfterStopFromSeparateThread() throws Exception { + var waitToShutdown = new CountDownLatch(1); + + var mockHttpClient = 
mock(HttpClient.class); + doAnswer(invocation -> { + waitToShutdown.countDown(); + return Void.TYPE; + }).when(mockHttpClient).send(any(), any(), any()); + + var service = createRequestExecutorService(mockHttpClient, null); + + Future executorTermination = submitShutdownRequest(waitToShutdown, service); + + PlainActionFuture listener = new PlainActionFuture<>(); + service.execute(HttpRequestTests.createMock("inferenceEntityId"), null, listener); + + service.start(); + + try { + executorTermination.get(1, TimeUnit.SECONDS); + } catch (Exception e) { + fail(Strings.format("Executor finished before it was signaled to shutdown: %s", e)); + } + + assertTrue(service.isShutdown()); + assertTrue(service.isTerminated()); + } + + public void testSend_AfterShutdown_Throws() { + var service = createRequestExecutorServiceWithMocks(); + + service.shutdown(); + + var listener = new PlainActionFuture(); + service.execute(HttpRequestTests.createMock("inferenceEntityId"), null, listener); + + var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat( + thrownException.getMessage(), + is("Failed to enqueue task because the http executor service [test_service] has already shutdown") + ); + assertTrue(thrownException.isExecutorShutdown()); + } + + public void testSend_Throws_WhenQueueIsFull() { + var service = new RequestExecutorService( + "test_service", + mock(HttpClient.class), + threadPool, + null, + RequestExecutorServiceSettingsTests.createRequestExecutorServiceSettings(1) + ); + + service.execute(HttpRequestTests.createMock("inferenceEntityId"), null, new PlainActionFuture<>()); + var listener = new PlainActionFuture(); + service.execute(HttpRequestTests.createMock("inferenceEntityId"), null, listener); + + var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat( + thrownException.getMessage(), + is("Failed to execute task because the http executor service [test_service] queue is full") + ); + assertFalse(thrownException.isExecutorShutdown()); + } + + public void testTaskThrowsError_CallsOnFailure() throws Exception { + var httpClient = mock(HttpClient.class); + + var service = createRequestExecutorService(httpClient, null); + + doAnswer(invocation -> { + service.shutdown(); + throw new IllegalArgumentException("failed"); + }).when(httpClient).send(any(), any(), any()); + + PlainActionFuture listener = new PlainActionFuture<>(); + + var request = createHttpPost(0, "a", "b"); + service.execute(request, null, listener); + service.start(); + + var thrownException = expectThrows(ElasticsearchException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is(format("Failed to send request from inference entity id [%s]", request.inferenceEntityId())) + ); + assertThat(thrownException.getCause(), instanceOf(IllegalArgumentException.class)); + assertTrue(service.isTerminated()); + } + + public void testShutdown_AllowsMultipleCalls() { + var service = createRequestExecutorServiceWithMocks(); + + service.shutdown(); + service.shutdown(); + service.start(); + + assertTrue(service.isTerminated()); + assertTrue(service.isShutdown()); + } + + public void testSend_CallsOnFailure_WhenRequestTimesOut() { + var service = createRequestExecutorServiceWithMocks(); + + var listener = new PlainActionFuture(); + service.execute(HttpRequestTests.createMock("inferenceEntityId"), TimeValue.timeValueNanos(1), listener); + + var thrownException = 
expectThrows(ElasticsearchTimeoutException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat( + thrownException.getMessage(), + is(format("Request timed out waiting to be executed after [%s]", TimeValue.timeValueNanos(1))) + ); + } + + public void testSend_NotifiesTasksOfShutdown() { + var service = createRequestExecutorServiceWithMocks(); + + var listener = new PlainActionFuture(); + service.execute(HttpRequestTests.createMock("inferenceEntityId"), null, listener); + service.shutdown(); + service.start(); + + var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); + + assertThat( + thrownException.getMessage(), + is("Failed to send request, queue service [test_service] has shutdown prior to executing request") + ); + assertTrue(thrownException.isExecutorShutdown()); + assertTrue(service.isTerminated()); + } + + public void testQueueTake_DoesNotCauseServiceToTerminate_WhenItThrows() throws InterruptedException { + @SuppressWarnings("unchecked") + BlockingQueue queue = mock(LinkedBlockingQueue.class); + + var service = new RequestExecutorService( + getTestName(), + mock(HttpClient.class), + threadPool, + mockQueueCreator(queue), + null, + createRequestExecutorServiceSettingsEmpty() + ); + + when(queue.take()).thenThrow(new ElasticsearchException("failed")).thenAnswer(invocation -> { + service.shutdown(); + return null; + }); + service.start(); + + assertTrue(service.isTerminated()); + verify(queue, times(2)).take(); + } + + public void testQueueTake_ThrowingInterruptedException_TerminatesService() throws Exception { + @SuppressWarnings("unchecked") + BlockingQueue queue = mock(LinkedBlockingQueue.class); + when(queue.take()).thenThrow(new InterruptedException("failed")); + + var service = new RequestExecutorService( + getTestName(), + mock(HttpClient.class), + threadPool, + mockQueueCreator(queue), + null, + createRequestExecutorServiceSettingsEmpty() + ); + + Future executorTermination = threadPool.generic().submit(() -> { + try { + service.start(); + } catch (Exception e) { + fail(Strings.format("Failed to shutdown executor: %s", e)); + } + }); + + executorTermination.get(TIMEOUT.millis(), TimeUnit.MILLISECONDS); + + assertTrue(service.isTerminated()); + verify(queue, times(1)).take(); + } + + public void testChangingCapacity_SetsCapacityToTwo() throws ExecutionException, InterruptedException, TimeoutException, IOException { + var waitToShutdown = new CountDownLatch(1); + var httpClient = mock(HttpClient.class); + + var settings = createRequestExecutorServiceSettings(1); + var service = new RequestExecutorService("test_service", httpClient, threadPool, null, settings); + + service.execute(HttpRequestTests.createMock("inferenceEntityId"), null, new PlainActionFuture<>()); + assertThat(service.queueSize(), is(1)); + + PlainActionFuture listener = new PlainActionFuture<>(); + service.execute(HttpRequestTests.createMock("inferenceEntityId"), null, listener); + + var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is("Failed to execute task because the http executor service [test_service] queue is full") + ); + + settings.setQueueCapacity(2); + + // There is a request already queued, and its execution path will initiate shutting down the service + doAnswer(invocation -> { + waitToShutdown.countDown(); + return Void.TYPE; + }).when(httpClient).send(any(), any(), any()); + + Future executorTermination = 
submitShutdownRequest(waitToShutdown, service); + + service.start(); + + executorTermination.get(TIMEOUT.millis(), TimeUnit.MILLISECONDS); + assertTrue(service.isTerminated()); + assertThat(service.remainingQueueCapacity(), is(2)); + } + + public void testChangingCapacity_DoesNotRejectsOverflowTasks_BecauseOfQueueFull() throws IOException, ExecutionException, + InterruptedException, TimeoutException { + var waitToShutdown = new CountDownLatch(1); + var httpClient = mock(HttpClient.class); + + var settings = createRequestExecutorServiceSettings(3); + var service = new RequestExecutorService("test_service", httpClient, threadPool, null, settings); + + service.execute(HttpRequestTests.createMock("inferenceEntityId"), null, new PlainActionFuture<>()); + service.execute(HttpRequestTests.createMock("inferenceEntityId"), null, new PlainActionFuture<>()); + + PlainActionFuture listener = new PlainActionFuture<>(); + service.execute(HttpRequestTests.createMock("inferenceEntityId"), null, listener); + assertThat(service.queueSize(), is(3)); + + settings.setQueueCapacity(1); + + // There is a request already queued, and its execution path will initiate shutting down the service + doAnswer(invocation -> { + waitToShutdown.countDown(); + return Void.TYPE; + }).when(httpClient).send(any(), any(), any()); + + Future executorTermination = submitShutdownRequest(waitToShutdown, service); + + service.start(); + + executorTermination.get(TIMEOUT.millis(), TimeUnit.MILLISECONDS); + assertTrue(service.isTerminated()); + assertThat(service.remainingQueueCapacity(), is(1)); + assertThat(service.queueSize(), is(0)); + + var thrownException = expectThrows( + EsRejectedExecutionException.class, + () -> listener.actionGet(TIMEOUT.getSeconds(), TimeUnit.SECONDS) + ); + assertThat( + thrownException.getMessage(), + is("Failed to send request, queue service [test_service] has shutdown prior to executing request") + ); + assertTrue(thrownException.isExecutorShutdown()); + } + + public void testChangingCapacity_ToZero_SetsQueueCapacityToUnbounded() throws IOException, ExecutionException, InterruptedException, + TimeoutException { + var waitToShutdown = new CountDownLatch(1); + var httpClient = mock(HttpClient.class); + + var settings = createRequestExecutorServiceSettings(1); + var service = new RequestExecutorService("test_service", httpClient, threadPool, null, settings); + + service.execute(HttpRequestTests.createMock("inferenceEntityId"), null, new PlainActionFuture<>()); + assertThat(service.queueSize(), is(1)); + + PlainActionFuture listener = new PlainActionFuture<>(); + service.execute(HttpRequestTests.createMock("inferenceEntityId"), null, listener); + + var thrownException = expectThrows(EsRejectedExecutionException.class, () -> listener.actionGet(TIMEOUT)); + assertThat( + thrownException.getMessage(), + is("Failed to execute task because the http executor service [test_service] queue is full") + ); + + settings.setQueueCapacity(0); + + // There is a request already queued, and its execution path will initiate shutting down the service + doAnswer(invocation -> { + waitToShutdown.countDown(); + return Void.TYPE; + }).when(httpClient).send(any(), any(), any()); + + Future executorTermination = submitShutdownRequest(waitToShutdown, service); + + service.start(); + + executorTermination.get(TIMEOUT.millis(), TimeUnit.MILLISECONDS); + assertTrue(service.isTerminated()); + assertThat(service.remainingQueueCapacity(), is(Integer.MAX_VALUE)); + } + + private Future submitShutdownRequest(CountDownLatch waitToShutdown, 
RequestExecutorService service) { + return threadPool.generic().submit(() -> { + try { + // wait for a task to be added to be executed before beginning shutdown + waitToShutdown.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); + service.shutdown(); + service.awaitTermination(TIMEOUT.getSeconds(), TimeUnit.SECONDS); + } catch (Exception e) { + fail(Strings.format("Failed to shutdown executor: %s", e)); + } + }); + } + + private RequestExecutorService createRequestExecutorServiceWithMocks() { + return createRequestExecutorService(null, null); + } + + private RequestExecutorService createRequestExecutorService(@Nullable HttpClient httpClient, @Nullable CountDownLatch startupLatch) { + var httpClientToUse = httpClient == null ? mock(HttpClient.class) : httpClient; + return new RequestExecutorService( + "test_service", + httpClientToUse, + threadPool, + startupLatch, + createRequestExecutorServiceSettingsEmpty() + ); + } +} From 67e9233e8249018258cea46d4f92830d058057ba Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Mon, 5 Feb 2024 13:50:56 -0600 Subject: [PATCH 034/106] Cleaning up the new ingest builders (#105149) Changing ingest builders to only hold a single representation for any given request field. --- .../action/bulk/BulkRequestBuilder.java | 18 +- .../action/delete/DeleteRequestBuilder.java | 15 +- .../action/index/IndexRequest.java | 21 ++- .../action/index/IndexRequestBuilder.java | 22 +-- .../ReplicationRequestBuilder.java | 11 +- .../InstanceShardOperationRequestBuilder.java | 12 +- .../action/update/UpdateRequest.java | 8 + .../action/update/UpdateRequestBuilder.java | 159 +++++------------- .../action/bulk/BulkRequestBuilderTests.java | 14 -- .../delete/DeleteRequestBuilderTests.java | 28 --- .../update/UpdateRequestBuilderTests.java | 18 -- 11 files changed, 78 insertions(+), 248 deletions(-) delete mode 100644 server/src/test/java/org/elasticsearch/action/delete/DeleteRequestBuilderTests.java diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java index 0d993d797b287..24d6fad554935 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequestBuilder.java @@ -48,11 +48,9 @@ public class BulkRequestBuilder extends ActionRequestLazyBuilder(); private ActiveShardCount waitForActiveShards; private TimeValue timeout; - private String timeoutString; private String globalPipeline; private String globalRouting; private WriteRequest.RefreshPolicy refreshPolicy; - private String refreshPolicyString; public BulkRequestBuilder(ElasticsearchClient client, @Nullable String globalIndex) { super(client, BulkAction.INSTANCE); @@ -166,7 +164,7 @@ public final BulkRequestBuilder setTimeout(TimeValue timeout) { * A timeout to wait if the index operation can't be performed immediately. Defaults to {@code 1m}. 
*/ public final BulkRequestBuilder setTimeout(String timeout) { - this.timeoutString = timeout; + this.timeout = TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout"); return this; } @@ -195,7 +193,7 @@ public BulkRequestBuilder setRefreshPolicy(WriteRequest.RefreshPolicy refreshPol @Override public BulkRequestBuilder setRefreshPolicy(String refreshPolicy) { - this.refreshPolicyString = refreshPolicy; + this.refreshPolicy = WriteRequest.RefreshPolicy.parse(refreshPolicy); return this; } @@ -223,9 +221,6 @@ public BulkRequest request() { if (timeout != null) { request.timeout(timeout); } - if (timeoutString != null) { - request.timeout(timeoutString); - } if (globalPipeline != null) { request.pipeline(globalPipeline); } @@ -235,9 +230,6 @@ public BulkRequest request() { if (refreshPolicy != null) { request.setRefreshPolicy(refreshPolicy); } - if (refreshPolicyString != null) { - request.setRefreshPolicy(refreshPolicyString); - } return request; } @@ -247,12 +239,6 @@ private void validate() { "Must use only request builders, requests, or byte arrays within a single bulk request. Cannot mix and match" ); } - if (timeout != null && timeoutString != null) { - throw new IllegalStateException("Must use only one setTimeout method"); - } - if (refreshPolicy != null && refreshPolicyString != null) { - throw new IllegalStateException("Must use only one setRefreshPolicy method"); - } } private int countNonEmptyLists(List... lists) { diff --git a/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java index dac5421bdeee0..f2b1dc7cd556c 100644 --- a/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/delete/DeleteRequestBuilder.java @@ -29,7 +29,6 @@ public class DeleteRequestBuilder extends ReplicationRequestBuilder */ public IndexRequest source(XContentType xContentType, Object... source) { + return source(getXContentBuilder(xContentType, source)); + } + + /** + * Returns an XContentBuilder for the given xContentType and source array + *
<p>
+ * Note: the number of objects passed to this method as varargs must be an even + * number. Also the first argument in each pair (the field name) must have a + * valid String representation. + *
</p>
+ */ + public static XContentBuilder getXContentBuilder(XContentType xContentType, Object... source) { if (source.length % 2 != 0) { throw new IllegalArgumentException("The number of object passed must be even but was [" + source.length + "]"); } @@ -496,11 +508,14 @@ public IndexRequest source(XContentType xContentType, Object... source) { try { XContentBuilder builder = XContentFactory.contentBuilder(xContentType); builder.startObject(); - for (int i = 0; i < source.length; i++) { - builder.field(source[i++].toString(), source[i]); + // This for loop increments by 2 because the source array contains adjacent key/value pairs: + for (int i = 0; i < source.length; i = i + 2) { + String field = source[i].toString(); + Object value = source[i + 1]; + builder.field(field, value); } builder.endObject(); - return source(builder); + return builder; } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate", e); } diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java index 7e39bf5875686..0cb04fbdba1a6 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java @@ -43,7 +43,6 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder */ public IndexRequestBuilder setSource(XContentType xContentType, Object... source) { - if (source.length % 2 != 0) { - throw new IllegalArgumentException("The number of object passed must be even but was [" + source.length + "]"); - } - try { - XContentBuilder builder = XContentFactory.contentBuilder(xContentType); - builder.startObject(); - for (int i = 0; i < source.length; i++) { - builder.field(source[i++].toString(), source[i]); - } - builder.endObject(); - return setSource(builder); - } catch (IOException e) { - throw new ElasticsearchGenerationException("Failed to generate", e); - } + return setSource(IndexRequest.getXContentBuilder(xContentType, source)); } /** @@ -278,13 +264,12 @@ public IndexRequestBuilder setRefreshPolicy(WriteRequest.RefreshPolicy refreshPo } public IndexRequestBuilder setRefreshPolicy(String refreshPolicy) { - this.refreshPolicyString = refreshPolicy; + this.refreshPolicy = WriteRequest.RefreshPolicy.parse(refreshPolicy); return this; } @Override public IndexRequest request() { - validate(); IndexRequest request = new IndexRequest(); super.apply(request); request.id(id); @@ -300,9 +285,6 @@ public IndexRequest request() { if (refreshPolicy != null) { request.setRefreshPolicy(refreshPolicy); } - if (refreshPolicyString != null) { - request.setRefreshPolicy(refreshPolicyString); - } if (ifSeqNo != null) { request.setIfSeqNo(ifSeqNo); } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java index 94935a670afb7..8eb82af2091cd 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java @@ -23,7 +23,6 @@ public abstract class ReplicationRequestBuilder< Response> { private String index; private TimeValue timeout; - private String timeoutString; private ActiveShardCount waitForActiveShards; protected ReplicationRequestBuilder(ElasticsearchClient client, ActionType action) { @@ 
-44,7 +43,7 @@ public RequestBuilder setTimeout(TimeValue timeout) { */ @SuppressWarnings("unchecked") public RequestBuilder setTimeout(String timeout) { - this.timeoutString = timeout; + this.timeout = TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout"); return (RequestBuilder) this; } @@ -84,17 +83,9 @@ protected void apply(Request request) { if (timeout != null) { request.timeout(timeout); } - if (timeoutString != null) { - request.timeout(timeoutString); - } if (waitForActiveShards != null) { request.waitForActiveShards(waitForActiveShards); } } - protected void validate() throws IllegalStateException { - if (timeout != null && timeoutString != null) { - throw new IllegalStateException("Must use only one setTimeout method"); - } - } } diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java index 1678b08aba940..64efcda2f14db 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java @@ -22,7 +22,6 @@ public abstract class InstanceShardOperationRequestBuilder< Response> { private String index; private TimeValue timeout; - private String timeoutString; protected InstanceShardOperationRequestBuilder(ElasticsearchClient client, ActionType action) { super(client, action); @@ -52,7 +51,7 @@ public RequestBuilder setTimeout(TimeValue timeout) { */ @SuppressWarnings("unchecked") public RequestBuilder setTimeout(String timeout) { - this.timeoutString = timeout; + this.timeout = TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".timeout"); return (RequestBuilder) this; } @@ -63,14 +62,5 @@ protected void apply(Request request) { if (timeout != null) { request.timeout(timeout); } - if (timeoutString != null) { - request.timeout(timeoutString); - } - } - - protected void validate() throws IllegalStateException { - if (timeoutString != null && timeout != null) { - throw new IllegalStateException("Must use only one setTimeout method"); - } } } diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java index d7b1ea46b77b0..36b6cc6aa9964 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java @@ -704,6 +704,14 @@ private IndexRequest safeDoc() { return doc; } + /** + * Sets the doc source of the update request to be used when the document does not exists. + */ + public UpdateRequest upsert(BytesReference source, XContentType contentType) { + safeUpsertRequest().source(source, contentType); + return this; + } + /** * Sets the index request to be used if the document does not exists. Otherwise, a * {@link org.elasticsearch.index.engine.DocumentMissingException} is thrown. 
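The UpdateRequestBuilder diff below applies the same single-representation idea: instead of keeping one field per setDoc/setUpsert overload (XContentBuilder, Map, String, byte[], Object...) and reconciling them all in request(), every overload is normalized at call time into a single BytesReference plus its XContentType. A minimal sketch of that normalization pattern (the class and method names here are illustrative, not the literal builder code; the x-content calls are the same ones the diff uses):

    import org.elasticsearch.common.bytes.BytesArray;
    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.xcontent.XContentBuilder;
    import org.elasticsearch.xcontent.XContentFactory;
    import org.elasticsearch.xcontent.XContentType;

    import java.io.IOException;
    import java.util.Map;

    // Hypothetical holder showing how all source overloads collapse into one form.
    class SourceHolder {
        private BytesReference source;
        private XContentType xContentType;

        // Canonical entry point: an XContentBuilder carries both bytes and type.
        void set(XContentBuilder builder) {
            this.source = BytesReference.bytes(builder);
            this.xContentType = builder.contentType();
        }

        // String source: wrap the bytes, keep the caller-supplied type.
        void set(String json, XContentType type) {
            this.source = new BytesArray(json);
            this.xContentType = type;
        }

        // Map source: serialize through a builder, then reuse the canonical path.
        void set(Map<String, Object> map, XContentType type) throws IOException {
            XContentBuilder builder = XContentFactory.contentBuilder(type);
            builder.map(map);
            set(builder);
        }
    }

Once every overload funnels into one pair, request() needs a single null check per field, which is what makes the "Must use only one set... method" validation removable in the diff below.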
diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java index cbf28d6718594..c1ee0f7b8af37 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.update; +import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.WriteRequest; @@ -15,12 +16,17 @@ import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.action.support.single.instance.InstanceShardOperationRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; +import org.elasticsearch.client.internal.Requests; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.VersionType; import org.elasticsearch.script.Script; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import java.io.IOException; import java.util.Map; public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder @@ -45,31 +51,18 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder docSourceMap; + private BytesReference docSourceBytesReference; private XContentType docSourceXContentType; - private String docSourceString; - private byte[] docSourceBytes; - private Integer docSourceOffset; - private Integer docSourceLength; - private Object[] docSourceArray; private IndexRequest upsert; - private XContentBuilder upsertSourceXContentBuilder; - private Map upsertSourceMap; + private BytesReference upsertSourceBytesReference; private XContentType upsertSourceXContentType; - private String upsertSourceString; - private byte[] upsertSourceBytes; - private Integer upsertSourceOffset; - private Integer upsertSourceLength; - private Object[] upsertSourceArray; private Boolean docAsUpsert; private Boolean detectNoop; private Boolean scriptedUpsert; private Boolean requireAlias; private WriteRequest.RefreshPolicy refreshPolicy; - private String refreshPolicyString; public UpdateRequestBuilder(ElasticsearchClient client) { this(client, null, null); @@ -235,7 +228,8 @@ public UpdateRequestBuilder setDoc(IndexRequest indexRequest) { * Sets the doc to use for updates when a script is not specified. */ public UpdateRequestBuilder setDoc(XContentBuilder source) { - this.docSourceXContentBuilder = source; + this.docSourceBytesReference = BytesReference.bytes(source); + this.docSourceXContentType = source.contentType(); return this; } @@ -243,24 +237,27 @@ public UpdateRequestBuilder setDoc(XContentBuilder source) { * Sets the doc to use for updates when a script is not specified. */ public UpdateRequestBuilder setDoc(Map source) { - this.docSourceMap = source; - return this; + return setDoc(source, Requests.INDEX_CONTENT_TYPE); } /** * Sets the doc to use for updates when a script is not specified. 
*/ public UpdateRequestBuilder setDoc(Map source, XContentType contentType) { - this.docSourceMap = source; - this.docSourceXContentType = contentType; - return this; + try { + XContentBuilder builder = XContentFactory.contentBuilder(contentType); + builder.map(source); + return setDoc(builder); + } catch (IOException e) { + throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); + } } /** * Sets the doc to use for updates when a script is not specified. */ public UpdateRequestBuilder setDoc(String source, XContentType xContentType) { - this.docSourceString = source; + this.docSourceBytesReference = new BytesArray(source); this.docSourceXContentType = xContentType; return this; } @@ -269,18 +266,14 @@ public UpdateRequestBuilder setDoc(String source, XContentType xContentType) { * Sets the doc to use for updates when a script is not specified. */ public UpdateRequestBuilder setDoc(byte[] source, XContentType xContentType) { - this.docSourceBytes = source; - this.docSourceXContentType = xContentType; - return this; + return setDoc(source, 0, source.length, xContentType); } /** * Sets the doc to use for updates when a script is not specified. */ public UpdateRequestBuilder setDoc(byte[] source, int offset, int length, XContentType xContentType) { - this.docSourceBytes = source; - this.docSourceOffset = offset; - this.docSourceLength = length; + this.docSourceBytesReference = new BytesArray(source, offset, length); this.docSourceXContentType = xContentType; return this; } @@ -290,8 +283,7 @@ public UpdateRequestBuilder setDoc(byte[] source, int offset, int length, XConte * is a field and value pairs. */ public UpdateRequestBuilder setDoc(Object... source) { - this.docSourceArray = source; - return this; + return setDoc(Requests.INDEX_CONTENT_TYPE, source); } /** @@ -299,9 +291,7 @@ public UpdateRequestBuilder setDoc(Object... source) { * is a field and value pairs. */ public UpdateRequestBuilder setDoc(XContentType xContentType, Object... source) { - this.docSourceArray = source; - this.docSourceXContentType = xContentType; - return this; + return setDoc(IndexRequest.getXContentBuilder(xContentType, source)); } /** @@ -317,7 +307,8 @@ public UpdateRequestBuilder setUpsert(IndexRequest indexRequest) { * Sets the doc source of the update request to be used when the document does not exists. */ public UpdateRequestBuilder setUpsert(XContentBuilder source) { - this.upsertSourceXContentBuilder = source; + this.upsertSourceBytesReference = BytesReference.bytes(source); + this.upsertSourceXContentType = source.contentType(); return this; } @@ -325,24 +316,27 @@ public UpdateRequestBuilder setUpsert(XContentBuilder source) { * Sets the doc source of the update request to be used when the document does not exists. */ public UpdateRequestBuilder setUpsert(Map source) { - this.upsertSourceMap = source; - return this; + return setUpsert(source, Requests.INDEX_CONTENT_TYPE); } /** * Sets the doc source of the update request to be used when the document does not exists. 
*/ public UpdateRequestBuilder setUpsert(Map source, XContentType contentType) { - this.upsertSourceMap = source; - this.upsertSourceXContentType = contentType; - return this; + try { + XContentBuilder builder = XContentFactory.contentBuilder(contentType); + builder.map(source); + return setUpsert(builder); + } catch (IOException e) { + throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); + } } /** * Sets the doc source of the update request to be used when the document does not exists. */ public UpdateRequestBuilder setUpsert(String source, XContentType xContentType) { - this.upsertSourceString = source; + this.upsertSourceBytesReference = new BytesArray(source); this.upsertSourceXContentType = xContentType; return this; } @@ -351,18 +345,14 @@ public UpdateRequestBuilder setUpsert(String source, XContentType xContentType) * Sets the doc source of the update request to be used when the document does not exists. */ public UpdateRequestBuilder setUpsert(byte[] source, XContentType xContentType) { - this.upsertSourceBytes = source; - this.upsertSourceXContentType = xContentType; - return this; + return setUpsert(source, 0, source.length, xContentType); } /** * Sets the doc source of the update request to be used when the document does not exists. */ public UpdateRequestBuilder setUpsert(byte[] source, int offset, int length, XContentType xContentType) { - this.upsertSourceBytes = source; - this.upsertSourceOffset = offset; - this.upsertSourceLength = length; + this.upsertSourceBytesReference = new BytesArray(source, offset, length); this.upsertSourceXContentType = xContentType; return this; } @@ -372,8 +362,7 @@ public UpdateRequestBuilder setUpsert(byte[] source, int offset, int length, XCo * includes field and value pairs. */ public UpdateRequestBuilder setUpsert(Object... source) { - this.upsertSourceArray = source; - return this; + return setUpsert(Requests.INDEX_CONTENT_TYPE, source); } /** @@ -381,9 +370,7 @@ public UpdateRequestBuilder setUpsert(Object... source) { * includes field and value pairs. */ public UpdateRequestBuilder setUpsert(XContentType xContentType, Object... 
source) { - this.upsertSourceArray = source; - this.upsertSourceXContentType = xContentType; - return this; + return setUpsert(IndexRequest.getXContentBuilder(xContentType, source)); } /** @@ -427,7 +414,7 @@ public UpdateRequestBuilder setRefreshPolicy(WriteRequest.RefreshPolicy refreshP @Override public UpdateRequestBuilder setRefreshPolicy(String refreshPolicy) { - this.refreshPolicyString = refreshPolicy; + this.refreshPolicy = WriteRequest.RefreshPolicy.parse(refreshPolicy); return this; } @@ -475,58 +462,14 @@ public UpdateRequest request() { if (doc != null) { request.doc(doc); } - if (docSourceXContentBuilder != null) { - request.doc(docSourceXContentBuilder); - } - if (docSourceMap != null) { - if (docSourceXContentType == null) { - request.doc(docSourceMap); - } else { - request.doc(docSourceMap, docSourceXContentType); - } - } - if (docSourceString != null && docSourceXContentType != null) { - request.doc(docSourceString, docSourceXContentType); - } - if (docSourceBytes != null && docSourceXContentType != null) { - if (docSourceOffset != null && docSourceLength != null) { - request.doc(docSourceBytes, docSourceOffset, docSourceLength, docSourceXContentType); - } - } - if (docSourceArray != null) { - if (docSourceXContentType == null) { - request.doc(docSourceArray); - } else { - request.doc(docSourceXContentType, docSourceArray); - } + if (docSourceBytesReference != null && docSourceXContentType != null) { + request.doc(docSourceBytesReference, docSourceXContentType); } if (upsert != null) { request.upsert(upsert); } - if (upsertSourceXContentBuilder != null) { - request.upsert(upsertSourceXContentBuilder); - } - if (upsertSourceMap != null) { - if (upsertSourceXContentType == null) { - request.upsert(upsertSourceMap); - } else { - request.upsert(upsertSourceMap, upsertSourceXContentType); - } - } - if (upsertSourceString != null && upsertSourceXContentType != null) { - request.upsert(upsertSourceString, upsertSourceXContentType); - } - if (upsertSourceBytes != null && upsertSourceXContentType != null) { - if (upsertSourceOffset != null && upsertSourceLength != null) { - request.upsert(upsertSourceBytes, upsertSourceOffset, upsertSourceLength, upsertSourceXContentType); - } - } - if (upsertSourceArray != null) { - if (upsertSourceXContentType == null) { - request.upsert(upsertSourceArray); - } else { - request.upsert(upsertSourceXContentType, upsertSourceArray); - } + if (upsertSourceBytesReference != null && upsertSourceXContentType != null) { + request.upsert(upsertSourceBytesReference, upsertSourceXContentType); } if (docAsUpsert != null) { request.docAsUpsert(docAsUpsert); @@ -543,15 +486,10 @@ public UpdateRequest request() { if (refreshPolicy != null) { request.setRefreshPolicy(refreshPolicy); } - if (refreshPolicyString != null) { - request.setRefreshPolicy(refreshPolicyString); - } return request; } - @Override protected void validate() throws IllegalStateException { - super.validate(); boolean fetchIncludeExcludeNotNull = fetchSourceInclude != null || fetchSourceExclude != null; boolean fetchIncludeExcludeArrayNotNull = fetchSourceIncludeArray != null || fetchSourceExcludeArray != null; boolean fetchSourceNotNull = fetchSource != null; @@ -571,18 +509,11 @@ protected void validate() throws IllegalStateException { } private int countDocSourceFieldsSet() { - return countNonNullObjects(doc, docSourceXContentBuilder, docSourceMap, docSourceString, docSourceBytes, docSourceArray); + return countNonNullObjects(doc, docSourceBytesReference); } private int 
countUpsertSourceFieldsSet() { - return countNonNullObjects( - upsert, - upsertSourceXContentBuilder, - upsertSourceMap, - upsertSourceString, - upsertSourceBytes, - upsertSourceArray - ); + return countNonNullObjects(upsert, upsertSourceBytesReference); } private int countNonNullObjects(Object... objects) { diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestBuilderTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestBuilderTests.java index 8843801e528a3..27b1104163d67 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestBuilderTests.java @@ -10,8 +10,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; public class BulkRequestBuilderTests extends ESTestCase { @@ -21,17 +19,5 @@ public void testValidation() { bulkRequestBuilder.add(new IndexRequestBuilder(null, randomAlphaOfLength(10))); bulkRequestBuilder.add(new IndexRequest()); expectThrows(IllegalStateException.class, bulkRequestBuilder::request); - - bulkRequestBuilder = new BulkRequestBuilder(null, null); - bulkRequestBuilder.add(new IndexRequestBuilder(null, randomAlphaOfLength(10))); - bulkRequestBuilder.setTimeout(randomTimeValue()); - bulkRequestBuilder.setTimeout(TimeValue.timeValueSeconds(randomIntBetween(1, 30))); - expectThrows(IllegalStateException.class, bulkRequestBuilder::request); - - bulkRequestBuilder = new BulkRequestBuilder(null, null); - bulkRequestBuilder.add(new IndexRequestBuilder(null, randomAlphaOfLength(10))); - bulkRequestBuilder.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values()).getValue()); - bulkRequestBuilder.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); - expectThrows(IllegalStateException.class, bulkRequestBuilder::request); } } diff --git a/server/src/test/java/org/elasticsearch/action/delete/DeleteRequestBuilderTests.java b/server/src/test/java/org/elasticsearch/action/delete/DeleteRequestBuilderTests.java deleted file mode 100644 index 0a59dac833ca9..0000000000000 --- a/server/src/test/java/org/elasticsearch/action/delete/DeleteRequestBuilderTests.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.delete; - -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.test.ESTestCase; - -public class DeleteRequestBuilderTests extends ESTestCase { - - public void testValidation() { - DeleteRequestBuilder deleteRequestBuilder = new DeleteRequestBuilder(null, randomAlphaOfLength(10)); - deleteRequestBuilder.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values()).toString()); - deleteRequestBuilder.setRefreshPolicy(randomFrom(WriteRequest.RefreshPolicy.values())); - expectThrows(IllegalStateException.class, deleteRequestBuilder::request); - - deleteRequestBuilder = new DeleteRequestBuilder(null, randomAlphaOfLength(10)); - deleteRequestBuilder.setTimeout(randomTimeValue()); - deleteRequestBuilder.setTimeout(TimeValue.timeValueSeconds(randomIntBetween(1, 30))); - expectThrows(IllegalStateException.class, deleteRequestBuilder::request); - } -} diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestBuilderTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestBuilderTests.java index 8091daed2b5b0..b2ab56c73c584 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestBuilderTests.java @@ -8,11 +8,8 @@ package org.elasticsearch.action.update; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; -import java.util.Map; - public class UpdateRequestBuilderTests extends ESTestCase { public void testValidation() { @@ -20,20 +17,5 @@ public void testValidation() { updateRequestBuilder.setFetchSource(randomAlphaOfLength(10), randomAlphaOfLength(10)); updateRequestBuilder.setFetchSource(true); expectThrows(IllegalStateException.class, updateRequestBuilder::request); - - updateRequestBuilder = new UpdateRequestBuilder(null); - updateRequestBuilder.setTimeout(randomTimeValue()); - updateRequestBuilder.setTimeout(TimeValue.timeValueSeconds(randomIntBetween(1, 30))); - expectThrows(IllegalStateException.class, updateRequestBuilder::request); - - updateRequestBuilder = new UpdateRequestBuilder(null); - updateRequestBuilder.setDoc("key", "value"); - updateRequestBuilder.setDoc(Map.of("key", "value")); - expectThrows(IllegalStateException.class, updateRequestBuilder::request); - - updateRequestBuilder = new UpdateRequestBuilder(null); - updateRequestBuilder.setUpsert("key", "value"); - updateRequestBuilder.setUpsert(Map.of("key", "value")); - expectThrows(IllegalStateException.class, updateRequestBuilder::request); } } From b250f06b09cc874ee413bc9bb8bd516cc0ccc1db Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 5 Feb 2024 12:21:52 -0800 Subject: [PATCH 035/106] Add a gradle plugin for embedded providers (#105094) x-content embeds its jackson implementation inside its jar. This commit formalizes the setup for this embedding with a gradle plugin so that it can be reused by other libs. 
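For a consuming library, the whole setup reduces to applying the plugin and naming the impl project; the libs/x-content/build.gradle diff below ends up as just:

    apply plugin: 'elasticsearch.embedded-providers'

    embeddedProviders {
      impl 'x-content', project(':libs:elasticsearch-x-content:impl')
    }

Behind that call, EmbeddedProviderExtension.impl(...) creates a detached configuration on the impl project, registers the generated provider-manifest and provider-impl Sync tasks (named after the capitalized impl project), and wires their output into the main source set, replacing the per-library transform registration and Sync task that x-content previously carried by hand.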
--- build-tools-internal/build.gradle | 4 ++ .../internal/EmbeddedProviderExtension.java | 62 +++++++++++++++++++ .../internal/EmbeddedProviderPlugin.java | 34 ++++++++++ libs/x-content/build.gradle | 48 +------------- 4 files changed, 103 insertions(+), 45 deletions(-) create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/EmbeddedProviderExtension.java create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/EmbeddedProviderPlugin.java diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle index 934d9f05d77a2..758cdf687e6b6 100644 --- a/build-tools-internal/build.gradle +++ b/build-tools-internal/build.gradle @@ -143,6 +143,10 @@ gradlePlugin { id = 'elasticsearch.mrjar' implementationClass = 'org.elasticsearch.gradle.internal.MrjarPlugin' } + embeddedProvider { + id = 'elasticsearch.embedded-providers' + implementationClass = 'org.elasticsearch.gradle.internal.EmbeddedProviderPlugin' + } releaseTools { id = 'elasticsearch.release-tools' implementationClass = 'org.elasticsearch.gradle.internal.release.ReleaseToolsPlugin' diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/EmbeddedProviderExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/EmbeddedProviderExtension.java new file mode 100644 index 0000000000000..d3f79f7f76d4f --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/EmbeddedProviderExtension.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal; + +import org.gradle.api.Project; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.file.Directory; +import org.gradle.api.provider.Provider; +import org.gradle.api.tasks.SourceSet; +import org.gradle.api.tasks.Sync; + +import static org.elasticsearch.gradle.internal.conventions.GUtils.capitalize; +import static org.elasticsearch.gradle.util.GradleUtils.getJavaSourceSets; +import static org.gradle.api.artifacts.type.ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE; +import static org.gradle.api.artifacts.type.ArtifactTypeDefinition.DIRECTORY_TYPE; + +public class EmbeddedProviderExtension { + + private final Project project; + + public EmbeddedProviderExtension(Project project) { + this.project = project; + } + + void impl(String implName, Project implProject) { + String projectName = implProject.getName(); + String capitalName = capitalize(projectName); + + Configuration implConfig = project.getConfigurations().detachedConfiguration(project.getDependencies().create(implProject)); + implConfig.attributes(attrs -> { + attrs.attribute(ARTIFACT_TYPE_ATTRIBUTE, DIRECTORY_TYPE); + attrs.attribute(EmbeddedProviderPlugin.IMPL_ATTR, true); + }); + + String manifestTaskName = "generate" + capitalName + "ProviderManifest"; + Provider generatedResourcesDir = project.getLayout().getBuildDirectory().dir("generated-resources"); + var generateProviderManifest = project.getTasks().register(manifestTaskName, GenerateProviderManifest.class); + generateProviderManifest.configure(t -> { + t.getManifestFile().set(generatedResourcesDir.map(d -> d.file("LISTING.TXT"))); + t.getProviderImplClasspath().from(implConfig); + }); + + String implTaskName = "generate" + capitalName + "ProviderImpl"; + var generateProviderImpl = project.getTasks().register(implTaskName, Sync.class); + generateProviderImpl.configure(t -> { + t.into(generatedResourcesDir); + t.into("IMPL-JARS/" + implName, spec -> { + spec.from(implConfig); + spec.from(generateProviderManifest); + }); + }); + + var mainSourceSet = getJavaSourceSets(project).findByName(SourceSet.MAIN_SOURCE_SET_NAME); + mainSourceSet.getOutput().dir(generateProviderImpl); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/EmbeddedProviderPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/EmbeddedProviderPlugin.java new file mode 100644 index 0000000000000..213730139d915 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/EmbeddedProviderPlugin.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal; + +import org.elasticsearch.gradle.transform.UnzipTransform; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.attributes.Attribute; + +import static org.gradle.api.artifacts.type.ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE; +import static org.gradle.api.artifacts.type.ArtifactTypeDefinition.DIRECTORY_TYPE; +import static org.gradle.api.artifacts.type.ArtifactTypeDefinition.JAR_TYPE; + +public class EmbeddedProviderPlugin implements Plugin { + static final Attribute IMPL_ATTR = Attribute.of("is.impl", Boolean.class); + + @Override + public void apply(Project project) { + + project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { + transformSpec.getFrom().attribute(ARTIFACT_TYPE_ATTRIBUTE, JAR_TYPE).attribute(IMPL_ATTR, true); + transformSpec.getTo().attribute(ARTIFACT_TYPE_ATTRIBUTE, DIRECTORY_TYPE).attribute(IMPL_ATTR, true); + transformSpec.parameters(parameters -> parameters.getIncludeArtifactName().set(true)); + }); + + project.getExtensions().create("embeddedProviders", EmbeddedProviderExtension.class, project); + } +} diff --git a/libs/x-content/build.gradle b/libs/x-content/build.gradle index 5c9dd49c007b8..15a79364559a2 100644 --- a/libs/x-content/build.gradle +++ b/libs/x-content/build.gradle @@ -6,44 +6,17 @@ * Side Public License, v 1. */ - -import org.elasticsearch.gradle.transform.UnzipTransform -import org.elasticsearch.gradle.internal.GenerateProviderManifest -import org.gradle.api.internal.artifacts.ArtifactAttributes - -import java.util.stream.Collectors - apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' +apply plugin: 'elasticsearch.embedded-providers' -def isImplAttr = Attribute.of("is.impl", Boolean) - -configurations { - providerImpl { - attributes.attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) - attributes.attribute(isImplAttr, true) - } +embeddedProviders { + impl 'x-content', project(':libs:elasticsearch-x-content:impl') } dependencies { - registerTransform( - UnzipTransform.class, transformSpec -> { - transformSpec.getFrom() - .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.JAR_TYPE) - .attribute(isImplAttr, true) - transformSpec.getTo() - .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) - .attribute(isImplAttr, true) - transformSpec.parameters(parameters -> { - parameters.includeArtifactName.set(true) - }) - - }) - api project(':libs:elasticsearch-core') - providerImpl project(':libs:elasticsearch-x-content:impl') - testImplementation(project(":test:framework")) { exclude group: 'org.elasticsearch', module: 'elasticsearch-x-content' } @@ -66,18 +39,3 @@ tasks.named("thirdPartyAudit").configure { tasks.named("dependencyLicenses").configure { mapping from: /jackson-.*/, to: 'jackson' } - -Directory generatedResourcesDir = layout.buildDirectory.dir('generated-resources').get() -def generateProviderManifest = tasks.register("generateProviderManifest", GenerateProviderManifest.class) { - manifestFile = generatedResourcesDir.file("LISTING.TXT") - getProviderImplClasspath().from(configurations.providerImpl) -} - -def generateProviderImpl = tasks.register("generateProviderImpl", Sync) { - destinationDir = generatedResourcesDir.dir("impl").getAsFile() - into("IMPL-JARS/x-content") { - from(configurations.providerImpl) - from(generateProviderManifest) - } -} 
-sourceSets.main.output.dir(generateProviderImpl) From 37b57a411bda1efb86359b6e2dd853b3cc195c41 Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Mon, 5 Feb 2024 15:38:19 -0500 Subject: [PATCH 036/106] [ci] Allow CI to be triggered by old elasticmachine-style comment (#105154) --- .buildkite/pull-requests.json | 2 +- .../__snapshots__/pipeline.test.ts.snap | 108 ++++++++++++++++++ .../scripts/pull-request/pipeline.test.ts | 20 +++- .buildkite/scripts/pull-request/pipeline.ts | 4 +- 4 files changed, 130 insertions(+), 4 deletions(-) diff --git a/.buildkite/pull-requests.json b/.buildkite/pull-requests.json index c4aa43c775b1e..de0212685a8a7 100644 --- a/.buildkite/pull-requests.json +++ b/.buildkite/pull-requests.json @@ -11,7 +11,7 @@ "set_commit_status": false, "build_on_commit": true, "build_on_comment": true, - "trigger_comment_regex": "(run\\W+elasticsearch-ci.+)|(^\\s*(buildkite\\s*)?test\\s+this(\\s+please)?)", + "trigger_comment_regex": "(run\\W+elasticsearch-ci.+)|(^\\s*((buildkite|@elastic(search)?machine)\\s*)?test\\s+this(\\s+please)?)", "cancel_intermediate_builds": true, "cancel_intermediate_builds_on_comment": false }, diff --git a/.buildkite/scripts/pull-request/__snapshots__/pipeline.test.ts.snap b/.buildkite/scripts/pull-request/__snapshots__/pipeline.test.ts.snap index 6df8ca8b63438..50dea7a07e042 100644 --- a/.buildkite/scripts/pull-request/__snapshots__/pipeline.test.ts.snap +++ b/.buildkite/scripts/pull-request/__snapshots__/pipeline.test.ts.snap @@ -201,3 +201,111 @@ exports[`generatePipelines should generate correct pipeline when using a trigger }, ] `; + +exports[`generatePipelines should generate correct pipelines with a non-docs change and @elasticmachine 1`] = ` +[ + { + "name": "bwc-snapshots", + "pipeline": { + "steps": [ + { + "group": "bwc-snapshots", + "steps": [ + { + "agents": { + "buildDirectory": "/dev/shm/bk", + "image": "family/elasticsearch-ubuntu-2004", + "machineType": "custom-32-98304", + "provider": "gcp", + }, + "command": ".ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTest", + "env": { + "BWC_VERSION": "{{matrix.BWC_VERSION}}", + }, + "label": "{{matrix.BWC_VERSION}} / bwc-snapshots", + "matrix": { + "setup": { + "BWC_VERSION": [ + "7.17.14", + "8.10.3", + "8.11.0", + ], + }, + }, + "timeout_in_minutes": 300, + }, + ], + }, + ], + }, + }, + { + "name": "using-defaults", + "pipeline": { + "env": { + "CUSTOM_ENV_VAR": "value", + }, + "steps": [ + { + "command": "echo 'hello world'", + "label": "test-step", + }, + ], + }, + }, +] +`; + +exports[`generatePipelines should generate correct pipelines with a non-docs change and @elasticsearchmachine 1`] = ` +[ + { + "name": "bwc-snapshots", + "pipeline": { + "steps": [ + { + "group": "bwc-snapshots", + "steps": [ + { + "agents": { + "buildDirectory": "/dev/shm/bk", + "image": "family/elasticsearch-ubuntu-2004", + "machineType": "custom-32-98304", + "provider": "gcp", + }, + "command": ".ci/scripts/run-gradle.sh -Dignore.tests.seed v{{matrix.BWC_VERSION}}#bwcTest", + "env": { + "BWC_VERSION": "{{matrix.BWC_VERSION}}", + }, + "label": "{{matrix.BWC_VERSION}} / bwc-snapshots", + "matrix": { + "setup": { + "BWC_VERSION": [ + "7.17.14", + "8.10.3", + "8.11.0", + ], + }, + }, + "timeout_in_minutes": 300, + }, + ], + }, + ], + }, + }, + { + "name": "using-defaults", + "pipeline": { + "env": { + "CUSTOM_ENV_VAR": "value", + }, + "steps": [ + { + "command": "echo 'hello world'", + "label": "test-step", + }, + ], + }, + }, +] +`; diff --git 
a/.buildkite/scripts/pull-request/pipeline.test.ts b/.buildkite/scripts/pull-request/pipeline.test.ts index d0634752260e4..562f37abbae1f 100644 --- a/.buildkite/scripts/pull-request/pipeline.test.ts +++ b/.buildkite/scripts/pull-request/pipeline.test.ts @@ -13,11 +13,11 @@ describe("generatePipelines", () => { }); // Helper for testing pipeline generations that should be the same when using the overall ci trigger comment "buildkite test this" - const testWithTriggerCheck = (directory: string, changedFiles?: string[]) => { + const testWithTriggerCheck = (directory: string, changedFiles?: string[], comment = "buildkite test this") => { const pipelines = generatePipelines(directory, changedFiles); expect(pipelines).toMatchSnapshot(); - process.env["GITHUB_PR_TRIGGER_COMMENT"] = "buildkite test this"; + process.env["GITHUB_PR_TRIGGER_COMMENT"] = comment; const pipelinesWithTriggerComment = generatePipelines(directory, changedFiles); expect(pipelinesWithTriggerComment).toEqual(pipelines); }; @@ -42,4 +42,20 @@ describe("generatePipelines", () => { const pipelines = generatePipelines(`${import.meta.dir}/mocks/pipelines`, ["build.gradle"]); expect(pipelines).toMatchSnapshot(); }); + + test("should generate correct pipelines with a non-docs change and @elasticmachine", () => { + testWithTriggerCheck( + `${import.meta.dir}/mocks/pipelines`, + ["build.gradle", "docs/README.asciidoc"], + "@elasticmachine test this please" + ); + }); + + test("should generate correct pipelines with a non-docs change and @elasticsearchmachine", () => { + testWithTriggerCheck( + `${import.meta.dir}/mocks/pipelines`, + ["build.gradle", "docs/README.asciidoc"], + "@elasticsearchmachine test this please" + ); + }); }); diff --git a/.buildkite/scripts/pull-request/pipeline.ts b/.buildkite/scripts/pull-request/pipeline.ts index 65aec47fe3cc8..6cb0e5d76b74b 100644 --- a/.buildkite/scripts/pull-request/pipeline.ts +++ b/.buildkite/scripts/pull-request/pipeline.ts @@ -148,7 +148,9 @@ export const generatePipelines = ( // However, if we're using the overall CI trigger "[buildkite] test this [please]", we should use the regular filters above if ( process.env["GITHUB_PR_TRIGGER_COMMENT"] && - !process.env["GITHUB_PR_TRIGGER_COMMENT"].match(/^\s*(buildkite\s*)?test\s+this(\s+please)?/i) + !process.env["GITHUB_PR_TRIGGER_COMMENT"].match( + /^\s*((@elastic(search)?machine|buildkite)\s*)?test\s+this(\s+please)?/i + ) ) { filters = [triggerCommentCheck]; } From e631f760178a030f727241710c71f4c88ac8dad0 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Mon, 5 Feb 2024 15:39:51 -0500 Subject: [PATCH 037/106] Mute failing tests (#105156) This PR mutes a couple tests that are flaky from a recent PR merge: https://github.com/elastic/elasticsearch/pull/105037 For this issue: https://github.com/elastic/elasticsearch/issues/105155 --- .../external/http/sender/RequestExecutorServiceTests.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java index efbe8d62d7207..a4282bbef058d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java @@ -277,6 +277,7 @@ public void testQueueTake_ThrowingInterruptedException_TerminatesService() throw verify(queue, times(1)).take(); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105155") public void testChangingCapacity_SetsCapacityToTwo() throws ExecutionException, InterruptedException, TimeoutException, IOException { var waitToShutdown = new CountDownLatch(1); var httpClient = mock(HttpClient.class); @@ -313,6 +314,7 @@ public void testChangingCapacity_SetsCapacityToTwo() throws ExecutionException, assertThat(service.remainingQueueCapacity(), is(2)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105155") public void testChangingCapacity_DoesNotRejectsOverflowTasks_BecauseOfQueueFull() throws IOException, ExecutionException, InterruptedException, TimeoutException { var waitToShutdown = new CountDownLatch(1); @@ -356,6 +358,7 @@ public void testChangingCapacity_DoesNotRejectsOverflowTasks_BecauseOfQueueFull( assertTrue(thrownException.isExecutorShutdown()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105155") public void testChangingCapacity_ToZero_SetsQueueCapacityToUnbounded() throws IOException, ExecutionException, InterruptedException, TimeoutException { var waitToShutdown = new CountDownLatch(1); From 40e0f1f8177815a555774f2d6995d8d355451296 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 5 Feb 2024 15:34:55 -0800 Subject: [PATCH 038/106] Field-caps should read fields from up-to-dated shards (#105153) I have seen scenarios in which field-caps return information from outdated shards. While this is probably acceptable for most cases, ESQL query planning relies on field-caps, potentially leading to missing data. The reason for this is that we don't check readAllowed when not acquiring a searcher for cases without a filter. I don't expect too much penalty in terms of performance with this change, but it helps avoid a subtle issue for ESQL. Closes #104809 --- docs/changelog/105153.yaml | 6 ++++++ .../action/fieldcaps/FieldCapabilitiesFetcher.java | 11 +++++++++-- 2 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/105153.yaml diff --git a/docs/changelog/105153.yaml b/docs/changelog/105153.yaml new file mode 100644 index 0000000000000..6c6b1f995df4b --- /dev/null +++ b/docs/changelog/105153.yaml @@ -0,0 +1,6 @@ +pr: 105153 +summary: Field-caps should read fields from up-to-dated shards +area: "Search" +type: bug +issues: + - 104809 diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java index 969d86f5f470c..641ca33d8e05b 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java @@ -57,8 +57,15 @@ FieldCapabilitiesIndexResponse fetch( ) throws IOException { final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); final IndexShard indexShard = indexService.getShard(shardId.getId()); - // no need to open a searcher if we aren't filtering - try (Engine.Searcher searcher = alwaysMatches(indexFilter) ? 
null : indexShard.acquireSearcher(Engine.CAN_MATCH_SEARCH_SOURCE)) { + final Engine.Searcher searcher; + if (alwaysMatches(indexFilter)) { + // no need to open a searcher if we aren't filtering, but make sure we are reading from an up-to-dated shard + indexShard.readAllowed(); + searcher = null; + } else { + searcher = indexShard.acquireSearcher(Engine.CAN_MATCH_SEARCH_SOURCE); + } + try (searcher) { return doFetch( task, shardId, From 313c63681f1fe15437f548e561789dbce537195e Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 5 Feb 2024 16:18:38 -0800 Subject: [PATCH 039/106] Adjust adoptium download url (#105161) The url for downloading JDKs from adoptium appears to use the semver version, not the openjdk version. I encountered this with a windows build. The current url was https://api.adoptium.net/v3/binary/version/jdk-17.0.9+9/windows/x64/jdk/hotspot/normal/eclipse?project=jdk which returns a version not found error, while https://api.adoptium.net/v3/binary/version/jdk-17.0.9+9.1/windows/x64/jdk/hotspot/normal/eclipse?project=jdk correctly downloads the jdk. --- .../AdoptiumJdkToolchainResolver.java | 2 +- .../AdoptiumJdkToolchainResolverSpec.groovy | 32 +++++++++---------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java index bddf95cae77d4..0270ee22ca8c5 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolver.java @@ -101,7 +101,7 @@ private AdoptiumVersionInfo toVersionInfo(JsonNode node) { private URI resolveDownloadURI(AdoptiumVersionRequest request, AdoptiumVersionInfo versionInfo) { return URI.create( "https://api.adoptium.net/v3/binary/version/jdk-" - + versionInfo.openjdkVersion + + versionInfo.semver + "/" + request.platform + "/" diff --git a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy index 7b8129f8dbaec..6383d577f027f 100644 --- a/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy +++ b/build-tools-internal/src/test/groovy/org/elasticsearch/gradle/internal/toolchain/AdoptiumJdkToolchainResolverSpec.groovy @@ -42,7 +42,7 @@ class AdoptiumJdkToolchainResolverSpec extends AbstractToolchainResolverSpec { 1, 1, "" + languageVersion.asInt() + ".1.1.1+37", - 0, "" + languageVersion.asInt() + ".1.1.1" + 0, "" + languageVersion.asInt() + ".1.1.1+37.1" ))) } @@ -52,22 +52,22 @@ class AdoptiumJdkToolchainResolverSpec extends AbstractToolchainResolverSpec { @Override def supportedRequests() { return [ - [19, ADOPTIUM, MAC_OS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37/mac/x64/jdk/hotspot/normal/eclipse?project=jdk"], - [19, ADOPTIUM, LINUX, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37/linux/x64/jdk/hotspot/normal/eclipse?project=jdk"], - [19, ADOPTIUM, WINDOWS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37/windows/x64/jdk/hotspot/normal/eclipse?project=jdk"], - [19, ADOPTIUM, MAC_OS, AARCH64, 
"https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37/mac/aarch64/jdk/hotspot/normal/eclipse?project=jdk"], - [19, ADOPTIUM, LINUX, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37/linux/aarch64/jdk/hotspot/normal/eclipse?project=jdk"], + [19, ADOPTIUM, MAC_OS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37.1/mac/x64/jdk/hotspot/normal/eclipse?project=jdk"], + [19, ADOPTIUM, LINUX, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37.1/linux/x64/jdk/hotspot/normal/eclipse?project=jdk"], + [19, ADOPTIUM, WINDOWS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37.1/windows/x64/jdk/hotspot/normal/eclipse?project=jdk"], + [19, ADOPTIUM, MAC_OS, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37.1/mac/aarch64/jdk/hotspot/normal/eclipse?project=jdk"], + [19, ADOPTIUM, LINUX, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-19.1.1.1+37.1/linux/aarch64/jdk/hotspot/normal/eclipse?project=jdk"], - [18, ADOPTIUM, MAC_OS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37/mac/x64/jdk/hotspot/normal/eclipse?project=jdk"], - [18, ADOPTIUM, LINUX, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37/linux/x64/jdk/hotspot/normal/eclipse?project=jdk"], - [18, ADOPTIUM, WINDOWS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37/windows/x64/jdk/hotspot/normal/eclipse?project=jdk"], - [18, ADOPTIUM, MAC_OS, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37/mac/aarch64/jdk/hotspot/normal/eclipse?project=jdk"], - [18, ADOPTIUM, LINUX, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37/linux/aarch64/jdk/hotspot/normal/eclipse?project=jdk"], - [17, ADOPTIUM, MAC_OS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37/mac/x64/jdk/hotspot/normal/eclipse?project=jdk"], - [17, ADOPTIUM, LINUX, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37/linux/x64/jdk/hotspot/normal/eclipse?project=jdk"], - [17, ADOPTIUM, WINDOWS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37/windows/x64/jdk/hotspot/normal/eclipse?project=jdk"], - [17, ADOPTIUM, MAC_OS, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37/mac/aarch64/jdk/hotspot/normal/eclipse?project=jdk"], - [17, ADOPTIUM, LINUX, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37/linux/aarch64/jdk/hotspot/normal/eclipse?project=jdk"] + [18, ADOPTIUM, MAC_OS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37.1/mac/x64/jdk/hotspot/normal/eclipse?project=jdk"], + [18, ADOPTIUM, LINUX, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37.1/linux/x64/jdk/hotspot/normal/eclipse?project=jdk"], + [18, ADOPTIUM, WINDOWS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37.1/windows/x64/jdk/hotspot/normal/eclipse?project=jdk"], + [18, ADOPTIUM, MAC_OS, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37.1/mac/aarch64/jdk/hotspot/normal/eclipse?project=jdk"], + [18, ADOPTIUM, LINUX, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-18.1.1.1+37.1/linux/aarch64/jdk/hotspot/normal/eclipse?project=jdk"], + [17, ADOPTIUM, MAC_OS, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37.1/mac/x64/jdk/hotspot/normal/eclipse?project=jdk"], + [17, ADOPTIUM, LINUX, X86_64, "https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37.1/linux/x64/jdk/hotspot/normal/eclipse?project=jdk"], + [17, ADOPTIUM, WINDOWS, X86_64, 
"https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37.1/windows/x64/jdk/hotspot/normal/eclipse?project=jdk"], + [17, ADOPTIUM, MAC_OS, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37.1/mac/aarch64/jdk/hotspot/normal/eclipse?project=jdk"], + [17, ADOPTIUM, LINUX, AARCH64, "https://api.adoptium.net/v3/binary/version/jdk-17.1.1.1+37.1/linux/aarch64/jdk/hotspot/normal/eclipse?project=jdk"] ] } From af163b2e044d1588140548409005b7428163dd81 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 5 Feb 2024 16:49:56 -0800 Subject: [PATCH 040/106] Fix LuceneSourceOperatorStatusTests (#105169) Closes #103774 --- .../compute/lucene/LuceneSourceOperatorStatusTests.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java index 01c5273a1e617..6c787052a8ae7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.lucene; -import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; @@ -19,7 +18,6 @@ import static org.hamcrest.Matchers.equalTo; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103774") public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTestCase { public static LuceneSourceOperator.Status simple() { return new LuceneSourceOperator.Status(2, Set.of("*:*"), new TreeSet<>(List.of("a:0", "a:1")), 0, 1, 5, 123, 99990, 8000); @@ -101,7 +99,7 @@ protected LuceneSourceOperator.Status mutateInstance(LuceneSourceOperator.Status switch (between(0, 8)) { case 0 -> processedSlices = randomValueOtherThan(processedSlices, ESTestCase::randomNonNegativeInt); case 1 -> processedQueries = randomValueOtherThan(processedQueries, LuceneSourceOperatorStatusTests::randomProcessedQueries); - case 2 -> processedQueries = randomValueOtherThan(processedShards, LuceneSourceOperatorStatusTests::randomProcessedShards); + case 2 -> processedShards = randomValueOtherThan(processedShards, LuceneSourceOperatorStatusTests::randomProcessedShards); case 3 -> sliceIndex = randomValueOtherThan(sliceIndex, ESTestCase::randomNonNegativeInt); case 4 -> totalSlices = randomValueOtherThan(totalSlices, ESTestCase::randomNonNegativeInt); case 5 -> pagesEmitted = randomValueOtherThan(pagesEmitted, ESTestCase::randomNonNegativeInt); From ac09d75078efc08610f016779dc57299eeb768d0 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Tue, 6 Feb 2024 05:08:10 +0200 Subject: [PATCH 041/106] ESQL: Extend STATS command to support aggregate expressions (#104958) Previously only aggregate functions (max/sum/etc..) were allowed inside the stats command. 
This PR allows expressions involving one or multiple aggregates to be used, such as: stats x = avg(salary % 3) + max(emp_no), y = min(emp_no / 3) + 10 - median(salary) by z = languages % 2 Improve verifier to not allow scalar functions over grouping for now --- docs/changelog/104958.yaml | 5 + .../src/main/resources/stats.csv-spec | 47 +++ .../main/resources/stats_percentile.csv-spec | 12 +- .../xpack/esql/analysis/Verifier.java | 81 +++-- .../esql/optimizer/LogicalPlanOptimizer.java | 328 ++++++++++++------ .../xpack/esql/parser/ExpressionBuilder.java | 2 +- .../xpack/esql/analysis/AnalyzerTests.java | 112 ++++++ .../xpack/esql/analysis/VerifierTests.java | 58 +++- .../optimizer/LogicalPlanOptimizerTests.java | 295 +++++++++++++++- 9 files changed, 798 insertions(+), 142 deletions(-) create mode 100644 docs/changelog/104958.yaml diff --git a/docs/changelog/104958.yaml b/docs/changelog/104958.yaml new file mode 100644 index 0000000000000..936342db03b45 --- /dev/null +++ b/docs/changelog/104958.yaml @@ -0,0 +1,5 @@ +pr: 104958 +summary: "ESQL: Extend STATS command to support aggregate expressions" +area: ES|QL +type: enhancement +issues: [] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 65b01aae461e5..fbb38df87ed75 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1112,3 +1112,50 @@ STATS ck = COUNT(job_positions), ck:l | cb:l | cd:l | ci:l | c:l | csv:l 221 | 204 | 183 | 183 | 100 | 100 ; + +nestedAggsNoGrouping#[skip:-8.12.99,reason:supported in 8.13+] +FROM employees +| STATS x = AVG(salary) / 2 + MAX(salary), a = AVG(salary), m = MAX(salary) +; + +x:d | a:d | m:i +99123.275 | 48248.55 |74999 +; + +nestedAggsWithGrouping#[skip:-8.12.99,reason:supported in 8.13+] +FROM employees +| STATS x = ROUND(AVG(salary % 3)) + MAX(emp_no), y = MIN(emp_no / 3) + 10 - MEDIAN(salary) by z = languages % 2 +| SORT z +; + +x:d | y:d | z:i +10101 | -41474.0 | 0 +10098 | -45391.0 | 1 +10030 | -44714.5 | null +; + +nestedAggsWithScalars#[skip:-8.12.99,reason:supported in 8.13+] +FROM employees +| STATS x = CONCAT(TO_STRING(ROUND(AVG(salary % 3))), TO_STRING(MAX(emp_no))), + y = ROUND((MIN(emp_no / 3) + PI() - MEDIAN(salary))/E()) + BY z = languages % 2 +; + +x:s | y:d | z:i +1.010029 | -16452.0 | null +1.010100 | -15260.0 | 0 +1.010097 | -16701.0 | 1 +; + +nestedAggsOverGroupingWithAlias#[skip:-8.12.99,reason:supported in 8.13] +FROM employees +| STATS e = max(languages) + 1 by l = languages +| SORT l +| LIMIT 3 +; + +e:i | l:i +2 | 1 +3 | 2 +4 | 3 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_percentile.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_percentile.csv-spec index 8ac93dc5455bd..db386e877b9c3 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_percentile.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats_percentile.csv-spec @@ -70,14 +70,14 @@ NULL ; -medianOfLong#[skip:-8.11.99,reason:ReplaceDuplicateAggWithEval breaks bwc gh-103765] +medianOfLong#[skip:-8.12.99,reason:ReplaceStatsAggExpressionWithEval breaks bwc gh-103765] from employees | stats m = median(salary_change.long), p50 = percentile(salary_change.long, 50); m:double | p50:double 0 | 0 ; -medianOfInteger#[skip:-8.12.99,reason:ReplaceDuplicateAggWithEval breaks bwc gh-103765/Expression spaces are maintained since 8.13] 
+medianOfInteger#[skip:-8.12.99,reason:ReplaceStatsAggExpressionWithEval breaks bwc/Expression spaces are maintained since 8.13] // tag::median[] FROM employees | STATS MEDIAN(salary), PERCENTILE(salary, 50) @@ -90,7 +90,7 @@ MEDIAN(salary):double | PERCENTILE(salary, 50):double // end::median-result[] ; -medianOfDouble#[skip:-8.11.99,reason:ReplaceDuplicateAggWithEval breaks bwc gh-103765] +medianOfDouble#[skip:-8.12.99,reason:ReplaceStatsAggExpressionWithEval breaks bwc gh-103765] from employees | stats m = median(salary_change), p50 = percentile(salary_change, 50); m:double | p50:double @@ -98,7 +98,7 @@ m:double | p50:double ; -medianOfLongByKeyword#[skip:-8.11.99,reason:ReplaceDuplicateAggWithEval breaks bwc gh-103765] +medianOfLongByKeyword#[skip:-8.12.99,reason:ReplaceStatsAggExpressionWithEval breaks bwc gh-103765] from employees | stats m = median(salary_change.long), p50 = percentile(salary_change.long, 50) by job_positions | sort m desc | limit 4; m:double | p50:double | job_positions:keyword @@ -109,7 +109,7 @@ m:double | p50:double | job_positions:keyword ; -medianOfIntegerByKeyword#[skip:-8.11.99,reason:ReplaceDuplicateAggWithEval breaks bwc gh-103765] +medianOfIntegerByKeyword#[skip:-8.12.99,reason:ReplaceStatsAggExpressionWithEval breaks bwc gh-103765] from employees | stats m = median(salary), p50 = percentile(salary, 50) by job_positions | sort m | limit 4; m:double | p50:double | job_positions:keyword @@ -120,7 +120,7 @@ m:double | p50:double | job_positions:keyword ; -medianOfDoubleByKeyword#[skip:-8.11.99,reason:ReplaceDuplicateAggWithEval breaks bwc gh-103765] +medianOfDoubleByKeyword#[skip:-8.12.99,reason:ReplaceStatsAggExpressionWithEval breaks bwc gh-103765] from employees | stats m = median(salary_change), p50 = percentile(salary_change, 50)by job_positions | sort m desc | limit 4; m:double | p50:double | job_positions:keyword diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index d0d1a4f4ef573..903c0f948f2e1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -21,8 +21,8 @@ import org.elasticsearch.xpack.ql.capabilities.Unresolvable; import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.ql.expression.Alias; +import org.elasticsearch.xpack.ql.expression.AttributeMap; import org.elasticsearch.xpack.ql.expression.Expression; -import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.UnresolvedAttribute; @@ -67,6 +67,8 @@ public Verifier(Metrics metrics) { Collection verify(LogicalPlan plan, BitSet partialMetrics) { assert partialMetrics != null; Set failures = new LinkedHashSet<>(); + // alias map, collected during the first iteration for better error messages + AttributeMap aliases = new AttributeMap<>(); // quick verification for unresolved attributes plan.forEachUp(p -> { @@ -80,6 +82,7 @@ Collection verify(LogicalPlan plan, BitSet partialMetrics) { } // p is resolved, skip else if (p.resolved()) { + p.forEachExpressionUp(Alias.class, a -> aliases.put(a.toAttribute(), a.child())); return; } // handle aggregate first to disambiguate between missing fields or incorrect function declaration @@ -128,7 
+131,7 @@ else if (p.resolved()) { return; } checkFilterConditionType(p, failures); - checkAggregate(p, failures); + checkAggregate(p, failures, aliases); checkRegexExtractOnlyOnStrings(p, failures); checkRow(p, failures); @@ -147,38 +150,60 @@ else if (p.resolved()) { return failures; } - private static void checkAggregate(LogicalPlan p, Set failures) { + private static void checkAggregate(LogicalPlan p, Set failures, AttributeMap aliases) { if (p instanceof Aggregate agg) { - // check aggregates + + List nakedGroups = new ArrayList<>(agg.groupings().size()); + // check grouping + // The grouping can not be an aggregate function + agg.groupings().forEach(e -> { + e.forEachUp(g -> { + if (g instanceof AggregateFunction af) { + failures.add(fail(g, "cannot use an aggregate [{}] for grouping", af)); + } + }); + nakedGroups.add(Alias.unwrap(e)); + }); + + // check aggregates - accept only aggregate functions or expressions in which each naked attribute is copied as + // specified in the grouping clause agg.aggregates().forEach(e -> { var exp = Alias.unwrap(e); - if (exp instanceof AggregateFunction af) { - af.field().forEachDown(AggregateFunction.class, f -> { - failures.add(fail(f, "nested aggregations [{}] not allowed inside other aggregations [{}]", f, af)); - }); - } else { - if (Expressions.match(agg.groupings(), g -> Alias.unwrap(g).semanticEquals(exp)) == false) { - failures.add( - fail( - exp, - "expected an aggregate function or group but got [" - + exp.sourceText() - + "] of type [" - + exp.nodeName() - + "]" - ) - ); - } + if (exp.foldable()) { + failures.add(fail(exp, "expected an aggregate function but found [{}]", exp.sourceText())); } + // traverse the tree to find invalid matches + checkInvalidNamedExpressionUsage(exp, nakedGroups, failures, 0); }); + } + } - // check grouping - // The grouping can not be an aggregate function - agg.groupings().forEach(e -> e.forEachUp(g -> { - if (g instanceof AggregateFunction af) { - failures.add(fail(g, "cannot use an aggregate [{}] for grouping", af)); - } - })); + // traverse the expression and look either for an agg function or a grouping match + // stop either when no children are left, the leaves are literals or a reference attribute is given + private static void checkInvalidNamedExpressionUsage(Expression e, List groups, Set failures, int level) { + // found an aggregate, constant or a group, bail out + if (e instanceof AggregateFunction af) { + af.field().forEachDown(AggregateFunction.class, f -> { + failures.add(fail(f, "nested aggregations [{}] not allowed inside other aggregations [{}]", f, af)); + }); + } else if (e.foldable()) { + // don't do anything + } + // don't allow nested groupings for now, e.g. stats substring(group) by group, as we don't optimize yet for them + else if (groups.contains(e)) { + if (level != 0) { + failures.add(fail(e, "scalar functions over groupings [{}] not allowed yet", e.sourceText())); + } + } + // if a reference is found, mark it as an error + else if (e instanceof NamedExpression ne) { + failures.add(fail(e, "column [{}] must appear in the STATS BY clause or be used in an aggregate function", ne.name())); + } + // otherwise keep going + else { + for (Expression child : e.children()) { + checkInvalidNamedExpressionUsage(child, groups, failures, level + 1); + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index
81f712ae0408a..71595b074afc7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -22,7 +22,6 @@ import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.TopN; -import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.planner.PlannerUtils; @@ -77,6 +76,7 @@ import java.util.function.Predicate; import static java.util.Arrays.asList; +import static java.util.Collections.singleton; import static org.elasticsearch.xpack.esql.expression.NamedExpressions.mergeOutputExpressions; import static org.elasticsearch.xpack.ql.expression.Expressions.asAttributes; import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.FoldNull; @@ -138,22 +138,20 @@ protected static Batch operators() { } protected static Batch cleanup() { - return new Batch<>( - "Clean Up", - new ReplaceDuplicateAggWithEval(), - // pushing down limits again, because ReplaceDuplicateAggWithEval could create new Project nodes that can still be optimized - new PushDownAndCombineLimits(), - new ReplaceLimitAndSortAsTopN() - ); + return new Batch<>("Clean Up", new ReplaceLimitAndSortAsTopN()); } protected static List> rules() { var substitutions = new Batch<>( "Substitutions", Limiter.ONCE, + // first extract nested aggs top-level - this simplifies the rest of the rules + new ReplaceStatsAggExpressionWithEval(), + // second extract nested aggs inside of them + new ReplaceStatsNestedExpressionWithEval(), + // lastly replace surrogate functions new SubstituteSurrogates(), new ReplaceRegexMatch(), - new ReplaceNestedExpressionWithEval(), new ReplaceAliasingEvalWithProject() // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 ); @@ -189,6 +187,7 @@ protected LogicalPlan rule(Aggregate aggregate) { } } + int[] counter = new int[] { 0 }; // 0. 
check list of surrogate expressions for (NamedExpression agg : aggs) { Expression e = Alias.unwrap(agg); @@ -205,7 +204,7 @@ protected LogicalPlan rule(Aggregate aggregate) { var attr = aggFuncToAttr.get(af); // the agg doesn't exist in the Aggregate, create an alias for it and save its attribute if (attr == null) { - var temporaryName = temporaryName(agg, af); + var temporaryName = temporaryName(af, agg, counter[0]++); // create a synthetic alias (so it doesn't clash with a user defined name) var newAlias = new Alias(agg.source(), temporaryName, null, af, null, true); attr = newAlias.toAttribute(); @@ -239,15 +238,31 @@ protected LogicalPlan rule(Aggregate aggregate) { // project away transient fields and re-enforce the original order using references (not copies) to the original aggs // this works since the replaced aliases have their nameId copied to avoid having to update all references (which has // a cascading effect) - plan = new EsqlProject(source, plan, Expressions.asAttributes(aggs)); + plan = new Project(source, plan, Expressions.asAttributes(aggs)); } } return plan; } - static String temporaryName(NamedExpression agg, AggregateFunction af) { - return "__" + agg.name() + "_" + af.functionName() + "@" + Integer.toHexString(af.hashCode()); + static String temporaryName(Expression inner, Expression outer, int suffix) { + String in = toString(inner); + String out = toString(outer); + return "$$" + in + "$" + out + "$" + suffix; + } + + static int TO_STRING_LIMIT = 16; + + static String toString(Expression ex) { + return ex instanceof AggregateFunction af ? af.functionName() : extractString(ex); + } + + static String extractString(Expression ex) { + return ex instanceof NamedExpression ne ? ne.name() : limitToString(ex.sourceText()).replace(' ', '_'); + } + + static String limitToString(String string) { + return string.length() > 16 ? string.substring(0, TO_STRING_LIMIT - 1) + ">" : string; } } @@ -259,17 +274,23 @@ static class ConvertStringToByteRef extends OptimizerRules.OptimizerExpressionRu @Override protected Expression rule(Literal lit) { - if (lit.value() == null) { + Object value = lit.value(); + + if (value == null) { return lit; } - if (lit.value() instanceof String s) { + if (value instanceof String s) { return Literal.of(lit, new BytesRef(s)); } - if (lit.value() instanceof List l) { + if (value instanceof List l) { if (l.isEmpty() || false == l.get(0) instanceof String) { return lit; } - return Literal.of(lit, l.stream().map(v -> new BytesRef((String) v)).toList()); + List byteRefs = new ArrayList<>(l.size()); + for (Object v : l) { + byteRefs.add(new BytesRef(v.toString())); + } + return Literal.of(lit, byteRefs); } return lit; } @@ -288,39 +309,80 @@ protected LogicalPlan rule(UnaryPlan plan) { if (plan instanceof Project project) { if (child instanceof Project p) { // eliminate lower project but first replace the aliases in the upper one - return p.withProjections(combineProjections(project.projections(), p.projections())); - } else if (child instanceof Aggregate a) { + project = p.withProjections(combineProjections(project.projections(), p.projections())); + child = project.child(); + plan = project; + // don't return the plan since the grandchild (now child) might be an aggregate that could not be folded on the way up + // e.g. 
stats c = count(x) | project c, c as x | project x + // try to apply the rule again opportunistically as another node might be pushed in (a limit might be pushed in) + } + // check if the projection eliminates certain aggregates + // but be mindful of aliases to existing aggregates that we don't want to duplicate to avoid redundant work + if (child instanceof Aggregate a) { var aggs = a.aggregates(); - var newAggs = combineProjections(project.projections(), aggs); - var newGroups = replacePrunedAliasesUsedInGroupBy(a.groupings(), aggs, newAggs); - return new Aggregate(a.source(), a.child(), newGroups, newAggs); + var newAggs = projectAggregations(project.projections(), aggs); + // project can be fully removed + if (newAggs != null) { + var newGroups = replacePrunedAliasesUsedInGroupBy(a.groupings(), aggs, newAggs); + plan = new Aggregate(a.source(), a.child(), newGroups, newAggs); + } } + return plan; } // Agg with underlying Project (group by on sub-queries) if (plan instanceof Aggregate a) { if (child instanceof Project p) { - return new Aggregate(a.source(), p.child(), a.groupings(), combineProjections(a.aggregates(), p.projections())); + plan = new Aggregate(a.source(), p.child(), a.groupings(), combineProjections(a.aggregates(), p.projections())); } } return plan; } + // variant of #combineProjections specialized for project followed by agg due to the rewrite rules applied on aggregations + // this method tries to combine the projections by paying attention to: + // - aggregations that are projected away - remove them + // - aliases in the project that point to aggregates - keep them in place (to avoid duplicating the aggs) + private static List projectAggregations( + List upperProjection, + List lowerAggregations + ) { + AttributeMap lowerAliases = new AttributeMap<>(); + for (NamedExpression ne : lowerAggregations) { + lowerAliases.put(ne.toAttribute(), Alias.unwrap(ne)); + } + + AttributeSet seen = new AttributeSet(); + for (NamedExpression upper : upperProjection) { + Expression unwrapped = Alias.unwrap(upper); + // projection contains an inner alias (pointing to an existing field inside the projection) + if (seen.contains(unwrapped)) { + return null; + } + seen.add(Expressions.attribute(unwrapped)); + } + + lowerAggregations = combineProjections(upperProjection, lowerAggregations); + + return lowerAggregations; + } + // normally only the upper projections should survive but since the lower list might have alias definitions // that might be reused by the upper one, these need to be replaced.
// for example an alias defined in the lower list might be referred in the upper - without replacing it the alias becomes invalid - private List combineProjections(List upper, List lower) { + private static List combineProjections( + List upper, + List lower + ) { // collect aliases in the lower list - AttributeMap.Builder aliasesBuilder = AttributeMap.builder(); + AttributeMap aliases = new AttributeMap<>(); for (NamedExpression ne : lower) { if ((ne instanceof Attribute) == false) { - aliasesBuilder.put(ne.toAttribute(), ne); + aliases.put(ne.toAttribute(), ne); } } - - AttributeMap aliases = aliasesBuilder.build(); List replaced = new ArrayList<>(); // replace any matching attribute with a lower alias (if there's a match) @@ -366,10 +428,7 @@ private List replacePrunedAliasesUsedInGroupBy( } public static Expression trimNonTopLevelAliases(Expression e) { - if (e instanceof Alias a) { - return new Alias(a.source(), a.name(), a.qualifier(), trimAliases(a.child()), a.id()); - } - return trimAliases(e); + return e instanceof Alias a ? a.replaceChild(trimAliases(a.child())) : trimAliases(e); } private static Expression trimAliases(Expression e) { @@ -1071,7 +1130,7 @@ protected Expression regexToEquals(RegexMatch regexMatch, Literal literal) { * becomes * eval `a + 1` = a + 1, `x % 2` = x % 2 | stats sum(`a+1`_ref) by `x % 2`_ref */ - static class ReplaceNestedExpressionWithEval extends OptimizerRules.OptimizerRule { + static class ReplaceStatsNestedExpressionWithEval extends OptimizerRules.OptimizerRule { @Override protected LogicalPlan rule(Aggregate aggregate) { @@ -1103,12 +1162,11 @@ protected LogicalPlan rule(Aggregate aggregate) { expToAttribute.put(a.child().canonical(), a.toAttribute()); } + int[] counter = new int[] { 0 }; // for the aggs make sure to unwrap the agg function and check the existing groupings - for (int i = 0, s = aggs.size(); i < s; i++) { - NamedExpression agg = aggs.get(i); - + for (NamedExpression agg : aggs) { NamedExpression a = (NamedExpression) agg.transformDown(Alias.class, as -> { - // if the child a nested expression + // if the child is a nested expression Expression child = as.child(); // shortcut for common scenario @@ -1123,9 +1181,6 @@ protected LogicalPlan rule(Aggregate aggregate) { return ref; } - // TODO: break expression into aggregate functions (sum(x + 1) / max(y + 2)) - // List afs = a.collectFirstChildren(AggregateFunction.class::isInstance); - // 1. look for the aggregate function var replaced = child.transformUp(AggregateFunction.class, af -> { Expression result = af; @@ -1135,7 +1190,7 @@ protected LogicalPlan rule(Aggregate aggregate) { if (field instanceof Attribute == false && field.foldable() == false) { // 3. 
create a new alias if one doesn't exist yet no reference Attribute attr = expToAttribute.computeIfAbsent(field.canonical(), k -> { - Alias newAlias = new Alias(k.source(), temporaryName(agg, af), null, k, null, true); + Alias newAlias = new Alias(k.source(), syntheticName(k, af, counter[0]++), null, k, null, true); evals.add(newAlias); aggsChanged.set(true); return newAlias.toAttribute(); @@ -1165,8 +1220,141 @@ protected LogicalPlan rule(Aggregate aggregate) { return aggregate; } - static String temporaryName(NamedExpression agg, AggregateFunction af) { - return SubstituteSurrogates.temporaryName(agg, af); + static String syntheticName(Expression expression, AggregateFunction af, int counter) { + return SubstituteSurrogates.temporaryName(expression, af, counter); + } + } + + /** + * Replace nested expressions over aggregates with synthetic eval post the aggregation + * stats a = sum(a) + min(b) by x + * becomes + * stats a1 = sum(a), a2 = min(b) by x | eval a = a1 + a2 | keep a, x + * + * Since the logic is very similar, this rule also handles duplicate aggregate functions to avoid duplicate compute + * stats a = min(x), b = min(x), c = count(*), d = count() by g + * becomes + * stats a = min(x), c = count(*) by g | eval b = a, d = c | keep a, b, c, d, g + */ + static class ReplaceStatsAggExpressionWithEval extends OptimizerRules.OptimizerRule { + ReplaceStatsAggExpressionWithEval() { + super(TransformDirection.UP); + } + + @Override + protected LogicalPlan rule(Aggregate aggregate) { + // build alias map + AttributeMap aliases = new AttributeMap<>(); + aggregate.forEachExpressionUp(Alias.class, a -> aliases.put(a.toAttribute(), a.child())); + + // break down each aggregate into AggregateFunction + // preserve the projection at the end + List aggs = aggregate.aggregates(); + + // root/naked aggs + Map rootAggs = Maps.newLinkedHashMapWithExpectedSize(aggs.size()); + // evals (original expression relying on multiple aggs) + List newEvals = new ArrayList<>(); + List newProjections = new ArrayList<>(); + // track the aggregate aggs (including grouping which is not an AggregateFunction) + List newAggs = new ArrayList<>(); + + Holder changed = new Holder<>(false); + int[] counter = new int[] { 0 }; + + for (NamedExpression agg : aggs) { + if (agg instanceof Alias as) { + // if the child is a nested expression + Expression child = as.child(); + + // common case - handle duplicates + if (child instanceof AggregateFunction af) { + AggregateFunction canonical = (AggregateFunction) af.canonical(); + Expression field = canonical.field().transformUp(e -> aliases.resolve(e, e)); + canonical = (AggregateFunction) canonical.replaceChildren( + CollectionUtils.combine(singleton(field), canonical.parameters()) + ); + + Alias found = rootAggs.get(canonical); + // aggregate is new + if (found == null) { + rootAggs.put(canonical, as); + newAggs.add(as); + newProjections.add(as.toAttribute()); + } + // agg already exists - preserve the current alias but point it to the existing agg + // thus don't add it to the list of aggs as we don't want duplicated compute + else { + changed.set(true); + newProjections.add(as.replaceChild(found.toAttribute())); + } + } + // nested expression over aggregate function - replace it with a reference and move the expression into a + // follow-up eval + else { + Holder transformed = new Holder<>(false); + Expression aggExpression = child.transformUp(AggregateFunction.class, af -> { + transformed.set(true); + changed.set(true); + + AggregateFunction canonical = (AggregateFunction)
af.canonical(); + Alias alias = rootAggs.get(canonical); + if (alias == null) { + // create synthetic alias over the found agg function + alias = new Alias( + af.source(), + syntheticName(canonical, child, counter[0]++), + as.qualifier(), + canonical, + null, + true + ); + // and remember it to remove duplicates + rootAggs.put(canonical, alias); + // add it to the list of aggregates and continue + newAggs.add(alias); + } + // (even when found) return a reference to it + return alias.toAttribute(); + }); + + Alias alias = as; + if (transformed.get()) { + // if at least a change occurred, update the alias and add it to the eval + alias = as.replaceChild(aggExpression); + newEvals.add(alias); + } + // aliased grouping + else { + newAggs.add(alias); + } + + newProjections.add(alias.toAttribute()); + } + } + // not an alias (e.g. grouping field) + else { + newAggs.add(agg); + newProjections.add(agg.toAttribute()); + } + } + + LogicalPlan plan = aggregate; + if (changed.get()) { + Source source = aggregate.source(); + plan = new Aggregate(source, aggregate.child(), aggregate.groupings(), newAggs); + if (newEvals.size() > 0) { + plan = new Eval(source, plan, newEvals); + } + // preserve initial projection + plan = new Project(source, plan, newProjections); + } + + return plan; + } + + static String syntheticName(Expression expression, Expression af, int counter) { + return SubstituteSurrogates.temporaryName(expression, af, counter); + } } @@ -1330,58 +1518,4 @@ private static LogicalPlan normalize(Aggregate aggregate, AttributeMap { - - ReplaceDuplicateAggWithEval() { - super(TransformDirection.UP); - } - - @Override - protected LogicalPlan rule(Aggregate aggregate) { - LogicalPlan plan = aggregate; - - boolean foundDuplicate = false; - var aggs = aggregate.aggregates(); - Map seenAggs = Maps.newMapWithExpectedSize(aggs.size()); - List projections = new ArrayList<>(); - List keptAggs = new ArrayList<>(aggs.size()); - - for (NamedExpression agg : aggs) { - var attr = agg.toAttribute(); - if (agg instanceof Alias as && as.child() instanceof AggregateFunction af) { - var seen = seenAggs.putIfAbsent(af, attr); - if (seen != null) { - foundDuplicate = true; - projections.add(as.replaceChild(seen)); - } - // otherwise keep the agg in place - else { - keptAggs.add(agg); - projections.add(attr); - } - } else { - keptAggs.add(agg); - projections.add(attr); - } - } - - // at least one duplicate found - add the projection (to keep the output in place) - if (foundDuplicate) { - var source = aggregate.source(); - var newAggregate = new Aggregate(source, aggregate.child(), aggregate.groupings(), keptAggs); - plan = new Project(source, newAggregate, projections); - } - - return plan; - } - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index d4cfb6b95176b..e07494c19e1ff 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -423,7 +423,7 @@ public List visitFields(EsqlBaseParser.FieldsContext ctx) { } /** - * Similar to {@link #visitFields(EsqlBaseParser.FieldsContext)} however avoids wrapping the exception + * Similar to {@link #visitFields(EsqlBaseParser.FieldsContext)} however avoids wrapping the expression
*/ public List visitGrouping(EsqlBaseParser.FieldsContext ctx) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index ee77ff93b7687..a1f1aae7e6e25 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -1554,6 +1555,112 @@ public void testUnresolvedMvExpand() { assertThat(e.getMessage(), containsString("Unknown column [bar]")); } + public void testRegularStats() { + var plan = analyze(""" + from tests + | stats by salary + """); + + var limit = as(plan, Limit.class); + } + + public void testLiteralInAggregateNoGrouping() { + var e = expectThrows(VerificationException.class, () -> analyze(""" + from test + |stats 1 + """)); + + assertThat(e.getMessage(), containsString("expected an aggregate function but found [1]")); + } + + public void testLiteralBehindEvalInAggregateNoGrouping() { + var e = expectThrows(VerificationException.class, () -> analyze(""" + from test + |eval x = 1 + |stats x + """)); + + assertThat(e.getMessage(), containsString("column [x] must appear in the STATS BY clause or be used in an aggregate function")); + } + + public void testLiteralsInAggregateNoGrouping() { + var e = expectThrows(VerificationException.class, () -> analyze(""" + from test + |stats 1 + 2 + """)); + + assertThat(e.getMessage(), containsString("expected an aggregate function but found [1 + 2]")); + } + + public void testLiteralsBehindEvalInAggregateNoGrouping() { + var e = expectThrows(VerificationException.class, () -> analyze(""" + from test + |eval x = 1 + 2 + |stats x + """)); + + assertThat(e.getMessage(), containsString("column [x] must appear in the STATS BY clause or be used in an aggregate function")); + } + + public void testFoldableInAggregateWithGrouping() { + var e = expectThrows(VerificationException.class, () -> analyze(""" + from test + |stats 1 + 2 by languages + """)); + + assertThat(e.getMessage(), containsString("expected an aggregate function but found [1 + 2]")); + } + + public void testLiteralsInAggregateWithGrouping() { + var e = expectThrows(VerificationException.class, () -> analyze(""" + from test + |stats "a" by languages + """)); + + assertThat(e.getMessage(), containsString("expected an aggregate function but found [\"a\"]")); + } + + public void testFoldableBehindEvalInAggregateWithGrouping() { + var e = expectThrows(VerificationException.class, () -> analyze(""" + from test + |eval x = 1 + 2 + |stats x by languages + """)); + + assertThat(e.getMessage(), containsString("column [x] must appear in the STATS BY clause or be used in an aggregate function")); + } + + public void testFoldableInGrouping() { + var e = expectThrows(VerificationException.class, () -> analyze(""" + from test + |stats x by 1 + """)); + + assertThat(e.getMessage(), containsString("[x] is not an aggregate function")); + } + + public void testScalarFunctionsInStats() { + var e = expectThrows(VerificationException.class, () -> 
analyze(""" + from test + |stats salary % 3 by languages + """)); + + assertThat( + e.getMessage(), + containsString("column [salary] must appear in the STATS BY clause or be used in an aggregate function") + ); + } + + public void testDeferredGroupingInStats() { + var e = expectThrows(VerificationException.class, () -> analyze(""" + from test + |eval x = first_name + |stats x by first_name + """)); + + assertThat(e.getMessage(), containsString("column [x] must appear in the STATS BY clause or be used in an aggregate function")); + } + public void testUnsupportedTypesInStats() { verifyUnsupported( """ @@ -1654,4 +1761,9 @@ private void assertEmptyEsRelation(LogicalPlan plan) { assertThat(esRelation.output(), equalTo(NO_FIELDS)); assertTrue(esRelation.index().mapping().isEmpty()); } + + @Override + protected IndexAnalyzers createDefaultIndexAnalyzers() { + return super.createDefaultIndexAnalyzers(); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 06d20de70bce3..1257cc5ee8bd6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -62,13 +62,17 @@ public void testRoundFunctionInvalidInputs() { public void testAggsExpressionsInStatsAggs() { assertEquals( - "1:44: expected an aggregate function or group but got [salary] of type [FieldAttribute]", + "1:44: column [salary] must appear in the STATS BY clause or be used in an aggregate function", error("from test | eval z = 2 | stats x = avg(z), salary by emp_no") ); assertEquals( - "1:19: expected an aggregate function or group but got [length(first_name)] of type [Length]", + "1:26: scalar functions over groupings [first_name] not allowed yet", error("from test | stats length(first_name), count(1) by first_name") ); + assertEquals( + "1:36: scalar functions over groupings [languages] not allowed yet", + error("from test | stats max(languages) + languages by l = languages") + ); assertEquals( "1:23: nested aggregations [max(salary)] not allowed inside other aggregations [max(max(salary))]", error("from test | stats max(max(salary)) by first_name") @@ -77,10 +81,6 @@ public void testAggsExpressionsInStatsAggs() { "1:25: argument of [avg(first_name)] must be [numeric except unsigned_long], found value [first_name] type [keyword]", error("from test | stats count(avg(first_name)) by first_name") ); - assertEquals( - "1:23: expected an aggregate function or group but got [emp_no + avg(emp_no)] of type [Add]", - error("from test | stats x = emp_no + avg(emp_no) by emp_no") - ); assertEquals( "1:23: second argument of [percentile(languages, languages)] must be a constant, received [languages]", error("from test | stats x = percentile(languages, languages) by emp_no") @@ -89,6 +89,7 @@ public void testAggsExpressionsInStatsAggs() { "1:23: second argument of [count_distinct(languages, languages)] must be a constant, received [languages]", error("from test | stats x = count_distinct(languages, languages) by emp_no") ); + } public void testAggsInsideGrouping() { @@ -98,10 +99,55 @@ public void testAggsInsideGrouping() { ); } + public void testAggsWithInvalidGrouping() { + assertEquals( + "1:35: column [languages] must appear in the STATS BY clause or be used in an aggregate function", + error("from test| stats max(languages) + languages by l = languages % 
3") + ); + } + + public void testAggsIgnoreCanonicalGrouping() { + // the grouping column should appear verbatim - ignore canonical representation as they complicate things significantly + // for no real benefit (1+languages != languages + 1) + assertEquals( + "1:39: column [languages] must appear in the STATS BY clause or be used in an aggregate function", + error("from test| stats max(languages) + 1 + languages by l = languages + 1") + ); + } + + public void testAggsWithoutAgg() { + // should work + assertEquals( + "1:35: column [salary] must appear in the STATS BY clause or be used in an aggregate function", + error("from test| stats max(languages) + salary by l = languages + 1") + ); + } + public void testAggsInsideEval() throws Exception { assertEquals("1:29: aggregate function [max(b)] not allowed outside STATS command", error("row a = 1, b = 2 | eval x = max(b)")); } + public void testAggsWithExpressionOverAggs() { + assertEquals( + "1:44: scalar functions over groupings [languages] not allowed yet", + error("from test | stats max(languages + 1) , m = languages + min(salary + 1) by l = languages, s = salary") + ); + } + + public void testAggScalarOverGroupingColumn() { + assertEquals( + "1:26: scalar functions over groupings [first_name] not allowed yet", + error("from test | stats length(first_name), count(1) by first_name") + ); + } + + public void testGroupingInAggs() { + assertEquals("2:12: column [salary] must appear in the STATS BY clause or be used in an aggregate function", error(""" + from test + |stats e = salary + max(salary) by languages + """)); + } + public void testDoubleRenamingField() { assertEquals( "1:44: Column [emp_no] renamed to [r1] and is no longer available [emp_no as r3]", diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index ed3df60ecf13b..06b81d9c4608e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -28,15 +28,19 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; +import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateFormat; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; +import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Sub; import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Dissect; @@ -230,6 +234,28 @@ public void testCombineProjectionWithAggregation() { assertThat(Expressions.names(agg.groupings()), contains("last_name", "first_name")); } + /** + * Project[[s{r}#4 AS d, s{r}#4, last_name{f}#21, first_name{f}#18]] + * \_Limit[500[INTEGER]] + * \_Aggregate[[last_name{f}#21, first_name{f}#18],[SUM(salary{f}#22) AS s, last_name{f}#21, first_name{f}#18]] + * \_EsRelation[test][_meta_field{f}#23, emp_no{f}#17, first_name{f}#18, ..] + */ + public void testCombineProjectionWithDuplicateAggregation() { + var plan = plan(""" + from test + | stats s = sum(salary), d = sum(salary), c = sum(salary) by last_name, first_name + | keep d, s, last_name, first_name + """); + + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("d", "s", "last_name", "first_name")); + var limit = as(project.child(), Limit.class); + var agg = as(limit.child(), Aggregate.class); + assertThat(Expressions.names(agg.aggregates()), contains("s", "last_name", "first_name")); + assertThat(Alias.unwrap(agg.aggregates().get(0)), instanceOf(Sum.class)); + assertThat(Expressions.names(agg.groupings()), contains("last_name", "first_name")); + } + public void testQlComparisonOptimizationsApply() { var plan = plan(""" from test @@ -1774,11 +1800,11 @@ public void testSimpleAvgReplacement() { var agg = as(limit.child(), Aggregate.class); var aggs = agg.aggregates(); var a = as(aggs.get(0), Alias.class); - assertThat(a.name(), startsWith("__a_SUM@")); + assertThat(a.name(), startsWith("$$SUM$a$")); var sum = as(a.child(), Sum.class); a = as(aggs.get(1), Alias.class); - assertThat(a.name(), startsWith("__a_COUNT@")); + assertThat(a.name(), startsWith("$$COUNT$a$")); var count = as(a.child(), Count.class); assertThat(Expressions.names(agg.groupings()), contains("last_name")); @@ -1799,7 +1825,7 @@ public void testClashingAggAvgReplacement() { """); assertThat(Expressions.names(plan.output()), contains("a", "c", "s", "last_name")); - var project = as(plan, EsqlProject.class); + var project = as(plan, Project.class); var eval = as(project.child(), Eval.class); var f = eval.fields(); assertThat(f, hasSize(1)); @@ -1835,7 +1861,7 @@ public void testSemiClashingAvgReplacement() { var agg = as(limit.child(), Aggregate.class); var aggs = agg.aggregates(); var a = as(aggs.get(0), Alias.class); - assertThat(a.name(), startsWith("__a_COUNT@")); + assertThat(a.name(), startsWith("$$COUNT$a$0")); var sum = as(a.child(), Count.class); a = as(aggs.get(1), Alias.class); @@ -2895,6 +2921,267 @@ public void testNestedMultiExpressionsInGroupingAndAggs() { assertThat(Expressions.names(agg.output()), contains("count(salary + 1)", "max(salary + 23)", "languages + 1", "emp_no % 3")); } + /** + * Expects + * Project[[x{r}#5]] + * \_Eval[[____x_AVG@9efc3cf3_SUM@daf9f221{r}#18 / ____x_AVG@9efc3cf3_COUNT@53cd08ed{r}#19 AS __x_AVG@9efc3cf3, __x_AVG@ + * 9efc3cf3{r}#16 / 2[INTEGER] + __x_MAX@475d0e4d{r}#17 AS x]] + * \_Limit[500[INTEGER]] + * \_Aggregate[[],[SUM(salary{f}#11) AS ____x_AVG@9efc3cf3_SUM@daf9f221, COUNT(salary{f}#11) AS ____x_AVG@9efc3cf3_COUNT@53cd0 + * 8ed, MAX(salary{f}#11) AS __x_MAX@475d0e4d]] + * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..] 
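+ * Note: AVG is a surrogate aggregation - SubstituteSurrogates rewrites it into SUM and COUNT plus a division in the follow-up Eval, which is why the Aggregate above lists SUM, COUNT and MAX but no AVG.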
+ */ + public void testStatsExpOverAggs() { + var plan = optimizedPlan(""" + from test + | stats x = avg(salary) /2 + max(salary) + """); + + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("x")); + var eval = as(project.child(), Eval.class); + var fields = eval.fields(); + assertThat(Expressions.name(fields.get(1)), is("x")); + // sum/count to compute avg + var div = as(fields.get(0).child(), Div.class); + // avg + max + var add = as(fields.get(1).child(), Add.class); + var limit = as(eval.child(), Limit.class); + var agg = as(limit.child(), Aggregate.class); + var aggs = agg.aggregates(); + assertThat(aggs, hasSize(3)); + var sum = as(Alias.unwrap(aggs.get(0)), Sum.class); + assertThat(Expressions.name(sum.field()), is("salary")); + var count = as(Alias.unwrap(aggs.get(1)), Count.class); + assertThat(Expressions.name(count.field()), is("salary")); + var max = as(Alias.unwrap(aggs.get(2)), Max.class); + assertThat(Expressions.name(max.field()), is("salary")); + } + + /** + * Expects + * Project[[x{r}#5, y{r}#9, z{r}#12]] + * \_Eval[[$$SUM$$$AVG$avg(salary_%_3)>$0$0{r}#29 / $$COUNT$$$AVG$avg(salary_%_3)>$0$1{r}#30 AS $$AVG$avg(salary_%_3)>$0, + * $$AVG$avg(salary_%_3)>$0{r}#23 + $$MAX$avg(salary_%_3)>$1{r}#24 AS x, + * $$MIN$min(emp_no_/_3)>$2{r}#25 + 10[INTEGER] - $$MEDIAN$min(emp_no_/_3)>$3{r}#26 AS y]] + * \_Limit[500[INTEGER]] + * \_Aggregate[[z{r}#12],[SUM($$salary_%_3$AVG$0{r}#27) AS $$SUM$$$AVG$avg(salary_%_3)>$0$0, + * COUNT($$salary_%_3$AVG$0{r}#27) AS $$COUNT$$$AVG$avg(salary_%_3)>$0$1, + * MAX(emp_no{f}#13) AS $$MAX$avg(salary_%_3)>$1, + * MIN($$emp_no_/_3$MIN$1{r}#28) AS $$MIN$min(emp_no_/_3)>$2, + * PERCENTILE(salary{f}#18,50[INTEGER]) AS $$MEDIAN$min(emp_no_/_3)>$3, z{r}#12]] + * \_Eval[[languages{f}#16 % 2[INTEGER] AS z, + * salary{f}#18 % 3[INTEGER] AS $$salary_%_3$AVG$0, + * emp_no{f}#13 / 3[INTEGER] AS $$emp_no_/_3$MIN$1]] + * \_EsRelation[test][_meta_field{f}#19, emp_no{f}#13, first_name{f}#14, ..] 
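+ * Note: MEDIAN likewise surfaces as its surrogate PERCENTILE(salary, 50), while the nested scalar arguments (salary % 3, emp_no / 3) are extracted into the Eval below the Aggregate by ReplaceStatsNestedExpressionWithEval.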
+ */ + public void testStatsExpOverAggsMulti() { + var plan = optimizedPlan(""" + from test + | stats x = avg(salary % 3) + max(emp_no), y = min(emp_no / 3) + 10 - median(salary) by z = languages % 2 + """); + + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("x", "y", "z")); + var eval = as(project.child(), Eval.class); + var fields = eval.fields(); + // avg = Sum/Count + assertThat(Expressions.name(fields.get(0)), containsString("AVG")); + assertThat(Alias.unwrap(fields.get(0)), instanceOf(Div.class)); + // avg + max + assertThat(Expressions.name(fields.get(1)), containsString("x")); + assertThat(Alias.unwrap(fields.get(1)), instanceOf(Add.class)); + // min + 10 - median + assertThat(Expressions.name(fields.get(2)), containsString("y")); + assertThat(Alias.unwrap(fields.get(2)), instanceOf(Sub.class)); + + var limit = as(eval.child(), Limit.class); + + var agg = as(limit.child(), Aggregate.class); + var aggs = agg.aggregates(); + var sum = as(Alias.unwrap(aggs.get(0)), Sum.class); + var count = as(Alias.unwrap(aggs.get(1)), Count.class); + var max = as(Alias.unwrap(aggs.get(2)), Max.class); + var min = as(Alias.unwrap(aggs.get(3)), Min.class); + var percentile = as(Alias.unwrap(aggs.get(4)), Percentile.class); + + eval = as(agg.child(), Eval.class); + fields = eval.fields(); + assertThat(Expressions.name(fields.get(0)), is("z")); + assertThat(Expressions.name(fields.get(1)), containsString("AVG")); + assertThat(Expressions.name(Alias.unwrap(fields.get(1))), containsString("salary")); + assertThat(Expressions.name(fields.get(2)), containsString("MIN")); + assertThat(Expressions.name(Alias.unwrap(fields.get(2))), containsString("emp_no")); + } + + /** + * Expects + * Project[[x{r}#5, y{r}#9, z{r}#12]] + * \_Eval[[$$SUM$$$AVG$CONCAT(TO_STRIN>$0$0{r}#29 / $$COUNT$$$AVG$CONCAT(TO_STRIN>$0$1{r}#30 AS $$AVG$CONCAT(TO_STRIN>$0, + * CONCAT(TOSTRING($$AVG$CONCAT(TO_STRIN>$0{r}#23),TOSTRING($$MAX$CONCAT(TO_STRIN>$1{r}#24)) AS x, + * $$MIN$(MIN(emp_no_/_3>$2{r}#25 + 3.141592653589793[DOUBLE] - $$MEDIAN$(MIN(emp_no_/_3>$3{r}#26 / 2.718281828459045[DOUBLE] + * AS y]] + * \_Limit[500[INTEGER]] + * \_Aggregate[[z{r}#12],[SUM($$salary_%_3$AVG$0{r}#27) AS $$SUM$$$AVG$CONCAT(TO_STRIN>$0$0, + * COUNT($$salary_%_3$AVG$0{r}#27) AS $$COUNT$$$AVG$CONCAT(TO_STRIN>$0$1, + * MAX(emp_no{f}#13) AS $$MAX$CONCAT(TO_STRIN>$1, + * MIN($$emp_no_/_3$MIN$1{r}#28) AS $$MIN$(MIN(emp_no_/_3>$2, + * PERCENTILE(salary{f}#18,50[INTEGER]) AS $$MEDIAN$(MIN(emp_no_/_3>$3, z{r}#12]] + * \_Eval[[languages{f}#16 % 2[INTEGER] AS z, + * salary{f}#18 % 3[INTEGER] AS $$salary_%_3$AVG$0, + * emp_no{f}#13 / 3[INTEGER] AS $$emp_no_/_3$MIN$1]] + * \_EsRelation[test][_meta_field{f}#19, emp_no{f}#13, first_name{f}#14, ..] 
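+ * Note: the synthetic $$ names come from SubstituteSurrogates#temporaryName; source text longer than TO_STRING_LIMIT (16) characters is truncated and marked with '>', hence fragments such as CONCAT(TO_STRIN>.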
+ */ + public void testStatsExpOverAggsWithScalars() { + var plan = optimizedPlan(""" + from test + | stats x = CONCAT(TO_STRING(AVG(salary % 3)), TO_STRING(MAX(emp_no))), + y = (MIN(emp_no / 3) + PI() - MEDIAN(salary))/E() + by z = languages % 2 + """); + + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("x", "y", "z")); + var eval = as(project.child(), Eval.class); + var fields = eval.fields(); + // avg = Sum/Count + assertThat(Expressions.name(fields.get(0)), containsString("AVG")); + assertThat(Alias.unwrap(fields.get(0)), instanceOf(Div.class)); + // concat(to_string(avg) + assertThat(Expressions.name(fields.get(1)), containsString("x")); + var concat = as(Alias.unwrap(fields.get(1)), Concat.class); + var toString = as(concat.children().get(0), ToString.class); + toString = as(concat.children().get(1), ToString.class); + // min + 10 - median/e + assertThat(Expressions.name(fields.get(2)), containsString("y")); + assertThat(Alias.unwrap(fields.get(2)), instanceOf(Div.class)); + + var limit = as(eval.child(), Limit.class); + + var agg = as(limit.child(), Aggregate.class); + var aggs = agg.aggregates(); + var sum = as(Alias.unwrap(aggs.get(0)), Sum.class); + var count = as(Alias.unwrap(aggs.get(1)), Count.class); + var max = as(Alias.unwrap(aggs.get(2)), Max.class); + var min = as(Alias.unwrap(aggs.get(3)), Min.class); + var percentile = as(Alias.unwrap(aggs.get(4)), Percentile.class); + assertThat(Expressions.name(aggs.get(5)), is("z")); + + eval = as(agg.child(), Eval.class); + fields = eval.fields(); + assertThat(Expressions.name(fields.get(0)), is("z")); + assertThat(Expressions.name(fields.get(1)), containsString("AVG")); + assertThat(Expressions.name(Alias.unwrap(fields.get(1))), containsString("salary")); + assertThat(Expressions.name(fields.get(2)), containsString("MIN")); + assertThat(Expressions.name(Alias.unwrap(fields.get(2))), containsString("emp_no")); + } + + /** + * Expects + * Project[[a{r}#5, b{r}#9, $$max(salary)_+_3>$COUNT$2{r}#46 AS d, $$count(salary)_->$MIN$3{r}#47 AS e, $$avg(salary)_+_m + * >$MAX$1{r}#45 AS g]] + * \_Eval[[$$$$avg(salary)_+_m>$AVG$0$SUM$0{r}#48 / $$max(salary)_+_3>$COUNT$2{r}#46 AS $$avg(salary)_+_m>$AVG$0, $$avg( + * salary)_+_m>$AVG$0{r}#44 + $$avg(salary)_+_m>$MAX$1{r}#45 AS a, $$avg(salary)_+_m>$MAX$1{r}#45 + 3[INTEGER] + + * 3.141592653589793[DOUBLE] + $$max(salary)_+_3>$COUNT$2{r}#46 AS b]] + * \_Limit[500[INTEGER]] + * \_Aggregate[[w{r}#28],[SUM(salary{f}#39) AS $$$$avg(salary)_+_m>$AVG$0$SUM$0, MAX(salary{f}#39) AS $$avg(salary)_+_m>$MAX$1 + * , COUNT(salary{f}#39) AS $$max(salary)_+_3>$COUNT$2, MIN(salary{f}#39) AS $$count(salary)_->$MIN$3]] + * \_Eval[[languages{f}#37 % 2[INTEGER] AS w]] + * \_EsRelation[test][_meta_field{f}#40, emp_no{f}#34, first_name{f}#35, ..] 
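+ * Note: although count(salary), min(salary) and max(salary) each appear in several expressions, ReplaceStatsAggExpressionWithEval computes every aggregate only once and wires the duplicates up through references, as asserted below.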
+ */ + public void testStatsExpOverAggsWithScalarAndDuplicateAggs() { + var plan = optimizedPlan(""" + from test + | stats a = avg(salary) + max(salary), + b = max(salary) + 3 + PI() + count(salary), + c = count(salary) - min(salary), + d = count(salary), + e = min(salary), + f = max(salary), + g = max(salary) + by w = languages % 2 + | keep a, b, d, e, g + """); + + var project = as(plan, Project.class); + var projections = project.projections(); + assertThat(Expressions.names(projections), contains("a", "b", "d", "e", "g")); + var refA = Alias.unwrap(projections.get(0)); + var refB = Alias.unwrap(projections.get(1)); + var refD = Alias.unwrap(projections.get(2)); + var refE = Alias.unwrap(projections.get(3)); + var refG = Alias.unwrap(projections.get(4)); + + var eval = as(project.child(), Eval.class); + var fields = eval.fields(); + // avg = Sum/Count + assertThat(Expressions.name(fields.get(0)), containsString("AVG")); + assertThat(Alias.unwrap(fields.get(0)), instanceOf(Div.class)); + // avg + max + assertThat(Expressions.name(fields.get(1)), is("a")); + var add = as(Alias.unwrap(fields.get(1)), Add.class); + var max_salary = add.right(); + assertThat(Expressions.attribute(fields.get(1)), is(Expressions.attribute(refA))); + + assertThat(Expressions.name(fields.get(2)), is("b")); + assertThat(Expressions.attribute(fields.get(2)), is(Expressions.attribute(refB))); + + add = as(Alias.unwrap(fields.get(2)), Add.class); + add = as(add.left(), Add.class); + add = as(add.left(), Add.class); + assertThat(Expressions.attribute(max_salary), is(Expressions.attribute(add.left()))); + + var limit = as(eval.child(), Limit.class); + + var agg = as(limit.child(), Aggregate.class); + var aggs = agg.aggregates(); + var sum = as(Alias.unwrap(aggs.get(0)), Sum.class); + + assertThat(Expressions.attribute(aggs.get(1)), is(Expressions.attribute(max_salary))); + var max = as(Alias.unwrap(aggs.get(1)), Max.class); + var count = as(Alias.unwrap(aggs.get(2)), Count.class); + var min = as(Alias.unwrap(aggs.get(3)), Min.class); + + eval = as(agg.child(), Eval.class); + fields = eval.fields(); + assertThat(Expressions.name(fields.get(0)), is("w")); + } + + /** + * Expects + * Project[[a{r}#5, a{r}#5 AS b, w{r}#12]] + * \_Limit[500[INTEGER]] + * \_Aggregate[[w{r}#12],[SUM($$salary_/_2_+_la>$SUM$0{r}#26) AS a, w{r}#12]] + * \_Eval[[emp_no{f}#16 % 2[INTEGER] AS w, salary{f}#21 / 2[INTEGER] + languages{f}#19 AS $$salary_/_2_+_la>$SUM$0]] + * \_EsRelation[test][_meta_field{f}#22, emp_no{f}#16, first_name{f}#17, ..] 
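+ * Note: sum(salary / 2 + languages) and sum(languages + salary / 2) share the same canonical form, so a single SUM is computed and b is simply projected as an alias of a.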
+ */ + public void testStatsWithCanonicalAggregate() throws Exception { + var plan = optimizedPlan(""" + from test + | stats a = sum(salary / 2 + languages), + b = sum(languages + salary / 2) + by w = emp_no % 2 + | keep a, b, w + """); + + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("a", "b", "w")); + assertThat(Expressions.name(Alias.unwrap(project.projections().get(1))), is("a")); + var limit = as(project.child(), Limit.class); + var aggregate = as(limit.child(), Aggregate.class); + var aggregates = aggregate.aggregates(); + assertThat(Expressions.names(aggregates), contains("a", "w")); + var unwrapped = Alias.unwrap(aggregates.get(0)); + var sum = as(unwrapped, Sum.class); + var sum_argument = sum.field(); + var grouping = aggregates.get(1); + + var eval = as(aggregate.child(), Eval.class); + var fields = eval.fields(); + assertThat(Expressions.attribute(fields.get(0)), is(Expressions.attribute(grouping))); + assertThat(Expressions.attribute(fields.get(1)), is(Expressions.attribute(sum_argument))); + } + private LogicalPlan optimizedPlan(String query) { return plan(query); } From e8c2f4ffed027155f864960af4b8fa6c32189a48 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 5 Feb 2024 19:19:17 -0800 Subject: [PATCH 042/106] Provision min runtime version jdk for compilation (#105152) This commit adjusts compile tasks to explicitly provision a Java toolchain for the Java minimum runtime version. By doing so the Java used by Gradle may be upgraded without the possibility of causing spurious warnings from javac which could fail the build, such as when new warnings are added in later JDK versions. --- .../internal/ElasticsearchJavaBasePlugin.java | 18 +++++++++++++++++- .../gradle/internal/MrjarPlugin.java | 3 +-- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java index 4a695e93ebdfe..e224b16bf588e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java @@ -26,14 +26,26 @@ import org.gradle.api.tasks.compile.CompileOptions; import org.gradle.api.tasks.compile.GroovyCompile; import org.gradle.api.tasks.compile.JavaCompile; +import org.gradle.jvm.toolchain.JavaLanguageVersion; +import org.gradle.jvm.toolchain.JavaToolchainService; import java.util.List; +import javax.inject.Inject; + /** * A wrapper around Gradle's Java Base plugin that applies our * common configuration for production code. 
*/ public class ElasticsearchJavaBasePlugin implements Plugin { + + private final JavaToolchainService javaToolchains; + + @Inject + ElasticsearchJavaBasePlugin(JavaToolchainService javaToolchains) { + this.javaToolchains = javaToolchains; + } + @Override public void apply(Project project) { // make sure the global build info plugin is applied to the root project @@ -103,7 +115,7 @@ private static void disableTransitiveDependenciesForSourceSet(Project project, S /** * Adds compiler settings to the project */ - public static void configureCompile(Project project) { + public void configureCompile(Project project) { project.getExtensions().getExtraProperties().set("compactProfile", "full"); JavaPluginExtension java = project.getExtensions().getByType(JavaPluginExtension.class); if (BuildParams.getJavaToolChainSpec().isPresent()) { @@ -112,6 +124,10 @@ public static void configureCompile(Project project) { java.setSourceCompatibility(BuildParams.getMinimumRuntimeVersion()); java.setTargetCompatibility(BuildParams.getMinimumRuntimeVersion()); project.getTasks().withType(JavaCompile.class).configureEach(compileTask -> { + compileTask.getJavaCompiler().set(javaToolchains.compilerFor(spec -> { + spec.getLanguageVersion().set(JavaLanguageVersion.of(BuildParams.getMinimumRuntimeVersion().getMajorVersion())); + })); + CompileOptions compileOptions = compileTask.getOptions(); /* * -path because gradle will send in paths that don't always exist. diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index 8b21826447b46..e7bc7e5b6507c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -11,7 +11,6 @@ import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Plugin; import org.gradle.api.Project; -import org.gradle.api.plugins.JavaLibraryPlugin; import org.gradle.api.plugins.JavaPlugin; import org.gradle.api.plugins.JavaPluginExtension; import org.gradle.api.tasks.SourceSet; @@ -50,7 +49,7 @@ public class MrjarPlugin implements Plugin { @Override public void apply(Project project) { - project.getPluginManager().apply(JavaLibraryPlugin.class); + project.getPluginManager().apply(ElasticsearchJavaBasePlugin.class); var javaExtension = project.getExtensions().getByType(JavaPluginExtension.class); var srcDir = project.getProjectDir().toPath().resolve("src"); From b1e2cb80405fa70f50027b0e9545ded070fe2ac2 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 5 Feb 2024 20:15:46 -0800 Subject: [PATCH 043/106] Deprecate client.type (#104574) client.type existed from the days of the node client existing alongside the java client. Since the node client no longer exists, it no longer serves a purpose, and is already ignored. Yet the setting still exists. This commit deprecates the client.type node setting. 
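As an illustration (the snippet is not part of this change), a node whose elasticsearch.yml still contains client.type: transport keeps starting as before but now logs a deprecation warning; since the value has been ignored since 8.0, the line can simply be removed.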
--- docs/changelog/104574.yaml | 10 ++++++++++ .../java/org/elasticsearch/client/internal/Client.java | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/104574.yaml diff --git a/docs/changelog/104574.yaml b/docs/changelog/104574.yaml new file mode 100644 index 0000000000000..68be002142fd9 --- /dev/null +++ b/docs/changelog/104574.yaml @@ -0,0 +1,10 @@ +pr: 104574 +summary: Deprecate `client.type` +area: Infra/Core +type: deprecation +issues: [] +deprecation: + title: Deprecate `client.type` + area: Cluster and node setting + details: The node setting `client.type` has been ignored since the node client was removed in 8.0. The setting is now deprecated and will be removed in a future release. + impact: Remove the `client.type` setting from `elasticsearch.yml` diff --git a/server/src/main/java/org/elasticsearch/client/internal/Client.java b/server/src/main/java/org/elasticsearch/client/internal/Client.java index 668168764a4d0..c6a2b0fee767f 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/Client.java +++ b/server/src/main/java/org/elasticsearch/client/internal/Client.java @@ -78,7 +78,7 @@ public interface Client extends ElasticsearchClient { case "node", "transport" -> s; default -> throw new IllegalArgumentException("Can't parse [client.type] must be one of [node, transport]"); }; - }, Property.NodeScope); + }, Property.NodeScope, Property.Deprecated); /** * The admin client that can be used to perform administrative operations. From 2a298a7acc100a66e71e39beb6cf2355becb5919 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 5 Feb 2024 20:58:53 -0800 Subject: [PATCH 044/106] Add replay diagnostic dir to system jvm options (#103535) When hotspot encounters an error, it will emit a log file which can be used for reproducing the error. This file is dumped to /tmp by default. This commit configures the replay file to be alongside the hs_err file. --- .../server/cli/SystemJvmOptions.java | 23 +++++++++++++++---- docs/changelog/103535.yaml | 5 ++++ 2 files changed, 24 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/103535.yaml diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java index 4a8b3da4777a0..27e79e637299c 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java @@ -19,6 +19,8 @@ final class SystemJvmOptions { static List systemJvmOptions(Settings nodeSettings, final Map sysprops) { + String distroType = sysprops.get("es.distribution.type"); + boolean isHotspot = sysprops.getOrDefault("sun.management.compiler", "").contains("HotSpot"); return Stream.of( /* * Cache ttl in seconds for positive DNS lookups noting that this overrides the JDK security property networkaddress.cache.ttl; @@ -65,10 +67,11 @@ static List systemJvmOptions(Settings nodeSettings, final Map Date: Tue, 6 Feb 2024 08:45:06 +0100 Subject: [PATCH 045/106] [Profiling] Always allow for CO2 and cost defaults (#105173) There are two possibilities to retrieve flamegraph data: * Via the native UI * Via the APM integration Depending on the scenario, different request parameters are set.
While we have improved the CO2 and cost calculation for the native UI, the host id, which is required for an improved CO2 and cost calculation, is not yet available for the APM integration. So far we've not performed this calculation at all because there were no associated host data for stacktraces. Consequently, we've returned zero values in all cases. With this commit we associate "dummy" host data so the CO2 and cost calculation falls back to default values. Once a host id is available for that case as well, we will instead use the improved calculations. --- .../xpack/profiling/GetStackTracesActionIT.java | 4 ++++ .../xpack/profiling/TransportGetStackTracesAction.java | 9 ++++----- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java index 8ad68ca7ceebc..6becc2eb6e385 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java @@ -77,6 +77,8 @@ public void testGetStackTracesFromAPMWithMatchNoDownsampling() throws Exception assertEquals(39, stackTrace.fileIds.size()); assertEquals(39, stackTrace.frameIds.size()); assertEquals(39, stackTrace.typeIds.size()); + assertTrue(stackTrace.annualCO2Tons > 0.0d); + assertTrue(stackTrace.annualCostsUSD > 0.0d); assertNotNull(response.getStackFrames()); StackFrame stackFrame = response.getStackFrames().get("fhsEKXDuxJ-jIJrZpdRuSAAAAAAAAFtj"); @@ -141,6 +143,8 @@ public int hashCode() { assertEquals(39, stackTrace.fileIds.size()); assertEquals(39, stackTrace.frameIds.size()); assertEquals(39, stackTrace.typeIds.size()); + assertTrue(stackTrace.annualCO2Tons > 0.0d); + assertTrue(stackTrace.annualCostsUSD > 0.0d); assertNotNull(response.getStackFrames()); StackFrame stackFrame = response.getStackFrames().get("fhsEKXDuxJ-jIJrZpdRuSAAAAAAAAFtj"); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index 567c36e6b4404..2674893c2382f 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -275,11 +275,8 @@ private void searchGenericEventGroupedByStackTrace( SingleBucketAggregation sample = searchResponse.getAggregations().get("sample"); StringTerms stacktraces = sample.getAggregations().get("group_by"); - // When we switch to aggregation by (hostID, stacktraceID) we need to change the empty List to this. - // List hostEventCounts = new ArrayList<>(MAX_TRACE_EVENTS_RESULT_SIZE); - // Related: https://github.com/elastic/prodfiler/issues/4300 - // See also the aggregation in searchEventGroupedByStackTrace() for the other parts of the change. - List hostEventCounts = Collections.emptyList(); + // When we retrieve host data for generic events, we need to adapt the handler similar to searchEventGroupedByStackTrace(). 
+ List hostEventCounts = new ArrayList<>(stacktraces.getBuckets().size()); // aggregation Map stackTraceEvents = new TreeMap<>(); @@ -288,6 +285,8 @@ private void searchGenericEventGroupedByStackTrace( totalSamples += count; String stackTraceID = stacktraceBucket.getKeyAsString(); + // For now, add a dummy-entry so CO2 and cost calculation can operate. In the future we will have one value per host. + hostEventCounts.add(new HostEventCount("unknown", stackTraceID, (int) count)); TraceEvent event = stackTraceEvents.get(stackTraceID); if (event == null) { event = new TraceEvent(stackTraceID); From 24a89e37c430ba9ffa5105ad06905fca036bd663 Mon Sep 17 00:00:00 2001 From: Larisa Motova Date: Mon, 5 Feb 2024 22:18:05 -1000 Subject: [PATCH 046/106] Fix write index resolution with aliases to TSDS's (#104440) Currently when you try to index a document to a TSDS via an alias the alias resolves to the latest backing index of the TSDS. This commit delegates finding the write index to the original data stream the alias points to. Fixes #104189 --- docs/changelog/104440.yaml | 6 ++++++ .../cluster/metadata/IndexAbstraction.java | 9 +++++++++ .../action/index/IndexRequestTests.java | 20 +++++++++++++++++++ 3 files changed, 35 insertions(+) create mode 100644 docs/changelog/104440.yaml diff --git a/docs/changelog/104440.yaml b/docs/changelog/104440.yaml new file mode 100644 index 0000000000000..4242b7786f05f --- /dev/null +++ b/docs/changelog/104440.yaml @@ -0,0 +1,6 @@ +pr: 104440 +summary: Fix write index resolution when an alias is pointing to a TSDS +area: Data streams +type: bug +issues: + - 104189 diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java index 15a196601b7b7..511f3f528fb65 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java @@ -255,6 +255,15 @@ public Index getWriteIndex() { return writeIndex; } + @Override + public Index getWriteIndex(IndexRequest request, Metadata metadata) { + if (dataStreamAlias == false) { + return getWriteIndex(); + } + + return metadata.getIndicesLookup().get(getWriteIndex().getName()).getParentDataStream().getWriteIndex(request, metadata); + } + @Override public DataStream getParentDataStream() { // aliases may not be part of a data stream diff --git a/server/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java b/server/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java index df8aa6ce07b61..327f31a247c30 100644 --- a/server/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.replication.ReplicationResponse; +import org.elasticsearch.cluster.metadata.DataStreamAlias; import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.bytes.BytesArray; @@ -440,6 +441,25 @@ public void testGetConcreteWriteIndex() { equalTo("Error get data stream timestamp field: timestamp [10.0] type [class java.lang.Double] error") ); } + + { + // Alias to time series data stream + DataStreamAlias alias = new DataStreamAlias("my-alias", List.of(tsdbDataStream), tsdbDataStream, 
null); + var metadataBuilder3 = Metadata.builder(metadata); + metadataBuilder3.put(alias.getName(), tsdbDataStream, true, null); + var metadata3 = metadataBuilder3.build(); + IndexRequest request = new IndexRequest(alias.getName()); + request.opType(DocWriteRequest.OpType.CREATE); + request.source(renderSource(source, start1), XContentType.JSON); + var result = request.getConcreteWriteIndex(metadata3.getIndicesLookup().get(alias.getName()), metadata3); + assertThat(result, equalTo(metadata3.dataStreams().get(tsdbDataStream).getIndices().get(0))); + + request = new IndexRequest(alias.getName()); + request.opType(DocWriteRequest.OpType.CREATE); + request.source(renderSource(source, start2), XContentType.JSON); + result = request.getConcreteWriteIndex(metadata3.getIndicesLookup().get(alias.getName()), metadata3); + assertThat(result, equalTo(metadata3.dataStreams().get(tsdbDataStream).getIndices().get(1))); + } } static String renderSource(String sourceTemplate, Instant instant) { From b1fb4100ddc396297d675fa897f5f748e16ee090 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Tue, 6 Feb 2024 10:07:35 +0100 Subject: [PATCH 047/106] [Transform] Do not log deduction-related warnings for transforms with disabled mapping deduction (#105138) --- .../TransportPreviewTransformAction.java | 2 +- .../TransportValidateTransformAction.java | 2 +- .../xpack/transform/transforms/Function.java | 2 + .../transform/transforms/latest/Latest.java | 1 + .../transform/transforms/pivot/Pivot.java | 11 +- .../transforms/pivot/SchemaUtil.java | 75 +++++---- .../AggregationSchemaAndResultTests.java | 15 +- .../transforms/pivot/SchemaUtilTests.java | 143 +++++++++++------- 8 files changed, 154 insertions(+), 97 deletions(-) diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java index 4eded1aa0b5a6..79644fac07579 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java @@ -306,6 +306,6 @@ private void getPreview( ); }, listener::onFailure); - function.deduceMappings(parentTaskClient, filteredHeaders, source, deduceMappingsListener); + function.deduceMappings(parentTaskClient, filteredHeaders, transformId, source, deduceMappingsListener); } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportValidateTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportValidateTransformAction.java index 0f9c8e6755bee..2a450eb931324 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportValidateTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportValidateTransformAction.java @@ -130,7 +130,7 @@ protected void doExecute(Task task, Request request, ActionListener li if (request.isDeferValidation()) { deduceMappingsListener.onResponse(emptyMap()); } else { - function.deduceMappings(client, config.getHeaders(), config.getSource(), deduceMappingsListener); + function.deduceMappings(client, config.getHeaders(), config.getId(), config.getSource(), deduceMappingsListener); } }, listener::onFailure); diff --git 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/Function.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/Function.java index e49fd65296825..b99b764ade099 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/Function.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/Function.java @@ -118,12 +118,14 @@ interface ChangeCollector { * * @param client a client instance for querying the source mappings * @param headers headers to be used to query only for what the caller is allowed to + * @param transformId transform id * @param sourceConfig the source configuration * @param listener listener to take the deduced mapping */ void deduceMappings( Client client, Map headers, + String transformId, SourceConfig sourceConfig, ActionListener> listener ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/Latest.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/Latest.java index 3d8b61fc10b23..6c37df72769d6 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/Latest.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/latest/Latest.java @@ -116,6 +116,7 @@ public List getPerformanceCriticalFields() { public void deduceMappings( Client client, Map headers, + String transformId, SourceConfig sourceConfig, ActionListener> listener ) { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/Pivot.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/Pivot.java index e0e4bc05adbe2..0d4dbcb6c2094 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/Pivot.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/Pivot.java @@ -89,18 +89,11 @@ public List getPerformanceCriticalFields() { public void deduceMappings( Client client, Map headers, + String transformId, SourceConfig sourceConfig, final ActionListener> listener ) { - SchemaUtil.deduceMappings( - client, - headers, - config, - sourceConfig.getIndex(), - sourceConfig.getQueryConfig().getQuery(), - sourceConfig.getRuntimeMappings(), - listener - ); + SchemaUtil.deduceMappings(client, headers, transformId, settings, config, sourceConfig, listener); } /** diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/SchemaUtil.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/SchemaUtil.java index a07f5c987c30c..2ddc9d6d4abda 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/SchemaUtil.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/SchemaUtil.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.transform.transforms.pivot; +import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; @@ -18,10 +19,11 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.index.query.QueryBuilder; import 
org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; +import org.elasticsearch.xpack.core.transform.transforms.SourceConfig; import org.elasticsearch.xpack.core.transform.transforms.pivot.PivotConfig; import java.math.BigDecimal; @@ -98,20 +100,21 @@ public static Object dropFloatingPointComponentIfTypeRequiresIt(String type, dou * * The Listener is alerted with a {@code Map} that is a "field-name":"type" mapping * - * @param client Client from which to make requests against the cluster - * @param config The PivotConfig for which to deduce destination mapping - * @param sourceIndex Source index that contains the data to pivot - * @param sourceQuery Source index query to apply - * @param runtimeMappings Source runtime mappings + * @param client a client instance for querying the source mappings + * @param headers headers to be used to query only for what the caller is allowed to + * @param transformId id of the transform, used for logging errors + * @param settingsConfig transform settings + * @param pivotConfig The PivotConfig for which to deduce destination mapping + * @param sourceConfig The SourceConfig that contains the source data description * @param listener Listener to alert on success or failure. */ public static void deduceMappings( final Client client, final Map headers, - final PivotConfig config, - final String[] sourceIndex, - final QueryBuilder sourceQuery, - final Map runtimeMappings, + final String transformId, + final SettingsConfig settingsConfig, + final PivotConfig pivotConfig, + final SourceConfig sourceConfig, final ActionListener> listener ) { // collects the fieldnames used as source for aggregations @@ -123,7 +126,7 @@ public static void deduceMappings( // collects the target mapping types used for grouping Map fieldTypesForGrouping = new HashMap<>(); - config.getGroupConfig().getGroups().forEach((destinationFieldName, group) -> { + pivotConfig.getGroupConfig().getGroups().forEach((destinationFieldName, group) -> { // skip any fields that use scripts as there will be no source mapping if (group.getScriptConfig() != null) { return; @@ -137,7 +140,7 @@ public static void deduceMappings( } }); - for (AggregationBuilder agg : config.getAggregationConfig().getAggregatorFactories()) { + for (AggregationBuilder agg : pivotConfig.getAggregationConfig().getAggregatorFactories()) { Tuple, Map> inputAndOutputTypes = TransformAggregations.getAggregationInputAndOutputTypes( agg ); @@ -147,7 +150,7 @@ public static void deduceMappings( // For pipeline aggs, since they are referencing other aggregations in the payload, they do not have any // sourcefieldnames to put into the payload. Though, certain ones, i.e. 
avg_bucket, do have determinant value types - for (PipelineAggregationBuilder agg : config.getAggregationConfig().getPipelineAggregatorFactories()) { + for (PipelineAggregationBuilder agg : pivotConfig.getAggregationConfig().getPipelineAggregatorFactories()) { aggregationTypes.put(agg.getName(), agg.getType()); } @@ -158,13 +161,13 @@ public static void deduceMappings( getSourceFieldMappings( client, headers, - sourceIndex, - sourceQuery, + sourceConfig, allFieldNames.values().stream().filter(Objects::nonNull).toArray(String[]::new), - runtimeMappings, ActionListener.wrap( sourceMappings -> listener.onResponse( resolveMappings( + transformId, + Boolean.FALSE.equals(settingsConfig.getDeduceMappings()) == false, aggregationSourceFieldNames, aggregationTypes, fieldNamesForGrouping, @@ -203,6 +206,8 @@ public static void getDestinationFieldMappings( } private static Map resolveMappings( + String transformId, + boolean deduceMappings, Map aggregationSourceFieldNames, Map aggregationTypes, Map fieldNamesForGrouping, @@ -217,19 +222,32 @@ private static Map resolveMappings( String destinationMapping = TransformAggregations.resolveTargetMapping(aggregationName, sourceMapping); logger.debug( - () -> format("Deduced mapping for: [%s], agg type [%s] to [%s]", targetFieldName, aggregationName, destinationMapping) + () -> format( + "[%s] Deduced mapping for: [%s], agg type [%s] to [%s]", + transformId, + targetFieldName, + aggregationName, + destinationMapping + ) ); if (TransformAggregations.isDynamicMapping(destinationMapping)) { logger.debug( - () -> format("Dynamic target mapping set for field [%s] and aggregation [%s]", targetFieldName, aggregationName) + () -> format( + "[%s] Dynamic target mapping set for field [%s] and aggregation [%s]", + transformId, + targetFieldName, + aggregationName + ) ); } else if (destinationMapping != null) { targetMapping.put(targetFieldName, destinationMapping); } else { - logger.warn( - "Failed to deduce mapping for [{}], fall back to dynamic mapping. " + logger.log( + deduceMappings ? Level.WARN : Level.INFO, + "[{}] Failed to deduce mapping for [{}], fall back to dynamic mapping. " + "Create the destination index with complete mappings first to avoid deducing the mappings", + transformId, targetFieldName ); } @@ -237,13 +255,15 @@ private static Map resolveMappings( fieldNamesForGrouping.forEach((targetFieldName, sourceFieldName) -> { String destinationMapping = fieldTypesForGrouping.computeIfAbsent(targetFieldName, (s) -> sourceMappings.get(sourceFieldName)); - logger.debug(() -> format("Deduced mapping for: [%s] to [%s]", targetFieldName, destinationMapping)); + logger.debug(() -> format("[%s] Deduced mapping for: [%s] to [%s]", transformId, targetFieldName, destinationMapping)); if (destinationMapping != null) { targetMapping.put(targetFieldName, destinationMapping); } else { - logger.warn( - "Failed to deduce mapping for [{}], fall back to keyword. " + logger.log( + deduceMappings ? Level.WARN : Level.INFO, + "[{}] Failed to deduce mapping for [{}], fall back to keyword. 
" + "Create the destination index with complete mappings first to avoid deducing the mappings", + transformId, targetFieldName ); targetMapping.put(targetFieldName, KeywordFieldMapper.CONTENT_TYPE); @@ -262,20 +282,19 @@ private static Map resolveMappings( static void getSourceFieldMappings( Client client, Map headers, - String[] index, - QueryBuilder query, + SourceConfig sourceConfig, String[] fields, - Map runtimeMappings, ActionListener> listener ) { + String[] index = sourceConfig.getIndex(); if (index == null || index.length == 0 || fields == null || fields.length == 0) { listener.onResponse(Collections.emptyMap()); return; } FieldCapabilitiesRequest fieldCapabilitiesRequest = new FieldCapabilitiesRequest().indices(index) - .indexFilter(query) + .indexFilter(sourceConfig.getQueryConfig().getQuery()) .fields(fields) - .runtimeFields(runtimeMappings) + .runtimeFields(sourceConfig.getRuntimeMappings()) .indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN); ClientHelper.executeWithHeadersAsync( headers, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationSchemaAndResultTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationSchemaAndResultTests.java index 9221dd36271f7..5943a9007fb7c 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationSchemaAndResultTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationSchemaAndResultTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.Strings; -import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -28,6 +27,8 @@ import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; +import org.elasticsearch.xpack.core.transform.transforms.SourceConfig; import org.elasticsearch.xpack.core.transform.transforms.pivot.AggregationConfig; import org.elasticsearch.xpack.core.transform.transforms.pivot.GroupConfig; import org.elasticsearch.xpack.core.transform.transforms.pivot.GroupConfigTests; @@ -151,10 +152,10 @@ public void testBasic() throws InterruptedException { listener -> SchemaUtil.deduceMappings( client, emptyMap(), + "my-transform", + new SettingsConfig(), pivotConfig, - new String[] { "source-index" }, - QueryBuilders.matchAllQuery(), - emptyMap(), + new SourceConfig(new String[] { "source-index" }), listener ), mappings -> { @@ -231,10 +232,10 @@ public void testNested() throws InterruptedException { listener -> SchemaUtil.deduceMappings( client, emptyMap(), + "my-transform", + new SettingsConfig(), pivotConfig, - new String[] { "source-index" }, - QueryBuilders.matchAllQuery(), - emptyMap(), + new SourceConfig(new String[] { "source-index" }), listener ), mappings -> { diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/SchemaUtilTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/SchemaUtilTests.java index 525f97af356da..f6846bc065976 100644 --- 
a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/SchemaUtilTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/SchemaUtilTests.java @@ -17,25 +17,39 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.common.Strings; -import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.transform.transforms.QueryConfig; +import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; +import org.elasticsearch.xpack.core.transform.transforms.SourceConfig; +import org.elasticsearch.xpack.core.transform.transforms.pivot.AggregationConfig; +import org.elasticsearch.xpack.core.transform.transforms.pivot.DateHistogramGroupSource; +import org.elasticsearch.xpack.core.transform.transforms.pivot.GroupConfig; +import org.elasticsearch.xpack.core.transform.transforms.pivot.PivotConfig; +import org.elasticsearch.xpack.core.transform.transforms.pivot.TermsGroupSource; import java.math.BigInteger; import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.Objects; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; import static java.util.Collections.singletonMap; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.is; @@ -98,16 +112,14 @@ public void testConvertToIntegerTypeIfNeeded() { public void testGetSourceFieldMappings() throws InterruptedException { try (var threadPool = createThreadPool()) { - final var client = new FieldCapsMockClient(threadPool); + final var client = new FieldCapsMockClient(threadPool, emptySet()); // fields is null this.>assertAsync( listener -> SchemaUtil.getSourceFieldMappings( client, emptyMap(), - new String[] { "index-1", "index-2" }, - QueryBuilders.matchAllQuery(), + new SourceConfig(new String[] { "index-1", "index-2" }), null, - emptyMap(), listener ), mappings -> { @@ -121,44 +133,8 @@ public void testGetSourceFieldMappings() throws InterruptedException { listener -> SchemaUtil.getSourceFieldMappings( client, emptyMap(), - new String[] { "index-1", "index-2" }, - QueryBuilders.matchAllQuery(), - new String[] {}, - emptyMap(), - listener - ), - mappings -> { - assertNotNull(mappings); - assertTrue(mappings.isEmpty()); - } - ); - - // indices is null - this.>assertAsync( - listener -> SchemaUtil.getSourceFieldMappings( - client, - emptyMap(), - null, - QueryBuilders.matchAllQuery(), - new String[] { "field-1", "field-2" }, - emptyMap(), - listener - ), - mappings -> { - assertNotNull(mappings); - assertTrue(mappings.isEmpty()); - } - ); - - // indices is empty - this.>assertAsync( - listener -> 
SchemaUtil.getSourceFieldMappings( - client, - emptyMap(), + new SourceConfig(new String[] { "index-1", "index-2" }), new String[] {}, - QueryBuilders.matchAllQuery(), - new String[] { "field-1", "field-2" }, - emptyMap(), listener ), mappings -> { @@ -172,10 +148,8 @@ public void testGetSourceFieldMappings() throws InterruptedException { listener -> SchemaUtil.getSourceFieldMappings( client, emptyMap(), - new String[] { "index-1", "index-2" }, - QueryBuilders.matchAllQuery(), + new SourceConfig(new String[] { "index-1", "index-2" }), new String[] { "field-1", "field-2" }, - emptyMap(), listener ), mappings -> { @@ -196,15 +170,13 @@ public void testGetSourceFieldMappingsWithRuntimeMappings() throws InterruptedEx } }; try (var threadPool = createThreadPool()) { - final var client = new FieldCapsMockClient(threadPool); + final var client = new FieldCapsMockClient(threadPool, emptySet()); this.>assertAsync( listener -> SchemaUtil.getSourceFieldMappings( client, emptyMap(), - new String[] { "index-1", "index-2" }, - QueryBuilders.matchAllQuery(), + new SourceConfig(new String[] { "index-1", "index-2" }, QueryConfig.matchAll(), runtimeMappings), new String[] { "field-1", "field-2" }, - runtimeMappings, listener ), mappings -> { @@ -240,9 +212,74 @@ public void testIsDateType() { assertFalse(SchemaUtil.isDateType("keyword")); } + public void testDeduceMappings_AllMappingsArePresent() throws InterruptedException { + testDeduceMappings( + emptySet(), + Map.of("by-day", "long", "by-user", "long", "by-business", "long", "timestamp", "long", "review_score", "double") + ); + } + + public void testDeduceMappings_GroupByFieldMappingIsMissing() throws InterruptedException { + testDeduceMappings( + Set.of("business_id"), + // Note that the expected mapping of the "by-business" target field is "keyword" + Map.of("by-day", "long", "by-user", "long", "by-business", "keyword", "timestamp", "long", "review_score", "double") + ); + } + + public void testDeduceMappings_AggregationFieldMappingIsMissing() throws InterruptedException { + testDeduceMappings( + Set.of("review_score"), + Map.of("by-day", "long", "by-user", "long", "by-business", "long", "timestamp", "long", "review_score", "double") + ); + } + + private void testDeduceMappings(Set fieldsWithoutMappings, Map expectedMappings) throws InterruptedException { + try (var threadPool = createThreadPool()) { + final var client = new FieldCapsMockClient(threadPool, fieldsWithoutMappings); + var groups = Map.of( + "by-day", + new DateHistogramGroupSource( + "timestamp", + null, + false, + new DateHistogramGroupSource.CalendarInterval(DateHistogramInterval.DAY), + null, + null + ), + "by-user", + new TermsGroupSource("user_id", null, false), + "by-business", + new TermsGroupSource("business_id", null, false) + ); + var aggs = AggregatorFactories.builder() + .addAggregator(AggregationBuilders.avg("review_score").field("stars")) + .addAggregator(AggregationBuilders.max("timestamp").field("timestamp")); + var groupConfig = new GroupConfig(emptyMap() /* unused anyway */, groups); + var aggregationConfig = new AggregationConfig(emptyMap() /* unused anyway */, aggs); + var pivotConfig = new PivotConfig(groupConfig, aggregationConfig, null); + this.>assertAsync( + listener -> SchemaUtil.deduceMappings( + client, + emptyMap(), + "my-transform", + new SettingsConfig.Builder().setDeduceMappings(randomBoolean() ? 
randomBoolean() : null).build(), + pivotConfig, + new SourceConfig(new String[] { "index-1", "index-2" }), + listener + ), + mappings -> assertThat(mappings, is(equalTo(expectedMappings))) + ); + } + } + private static class FieldCapsMockClient extends NoOpClient { - FieldCapsMockClient(ThreadPool threadPool) { + + private final Set fieldsWithoutMappings; + + FieldCapsMockClient(ThreadPool threadPool, Set fieldsWithoutMappings) { super(threadPool); + this.fieldsWithoutMappings = Objects.requireNonNull(fieldsWithoutMappings); } @SuppressWarnings("unchecked") @@ -255,7 +292,11 @@ protected void if (request instanceof FieldCapabilitiesRequest fieldCapsRequest) { Map> responseMap = new HashMap<>(); for (String field : fieldCapsRequest.fields()) { - responseMap.put(field, singletonMap(field, createFieldCapabilities(field, "long"))); + if (fieldsWithoutMappings.contains(field)) { + // If the field mappings should be missing, do **not** put it in the response. + } else { + responseMap.put(field, singletonMap(field, createFieldCapabilities(field, "long"))); + } } for (Map.Entry runtimeField : fieldCapsRequest.runtimeFields().entrySet()) { String field = runtimeField.getKey(); From 0977ba9f311c81ac43a5b812f31d878b4936ef34 Mon Sep 17 00:00:00 2001 From: Jan Kuipers <148754765+jan-elastic@users.noreply.github.com> Date: Tue, 6 Feb 2024 10:11:11 +0100 Subject: [PATCH 048/106] Fix link to test_grok_pattern api (#105174) --- .../rest-api-spec/api/text_structure.test_grok_pattern.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.test_grok_pattern.json b/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.test_grok_pattern.json index e0361d30b5e73..b291ce6b87e8e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.test_grok_pattern.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/text_structure.test_grok_pattern.json @@ -1,7 +1,7 @@ { "text_structure.test_grok_pattern": { "documentation": { - "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/test-grok-pattern-api.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/test-grok-pattern.html", "description": "Tests a Grok pattern on some text." 
}, "stability": "stable", From b5f4c5e204539e56d750a3400933389a0ae78bc4 Mon Sep 17 00:00:00 2001 From: Alexander Spies Date: Tue, 6 Feb 2024 10:23:41 +0100 Subject: [PATCH 049/106] ESQL: Push CIDR_MATCH to Lucene if possible (#105061) --- docs/changelog/105061.yaml | 6 +++ .../function/scalar/ip/CIDRMatch.java | 8 +++ .../optimizer/LocalPhysicalPlanOptimizer.java | 4 ++ .../esql/planner/EsqlTranslatorHandler.java | 28 +++++++++- .../LocalPhysicalPlanOptimizerTests.java | 51 +++++++++++++++++++ 5 files changed, 96 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/105061.yaml diff --git a/docs/changelog/105061.yaml b/docs/changelog/105061.yaml new file mode 100644 index 0000000000000..ae8a36183e0e7 --- /dev/null +++ b/docs/changelog/105061.yaml @@ -0,0 +1,6 @@ +pr: 105061 +summary: "ESQL: Push CIDR_MATCH to Lucene if possible" +area: ES|QL +type: bug +issues: + - 105042 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java index 80d306fdc4fda..bed4aab93dd53 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java @@ -61,6 +61,14 @@ public CIDRMatch( this.matches = matches; } + public Expression ipField() { + return ipField; + } + + public List matches() { + return matches; + } + @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var ipEvaluatorSupplier = toEvaluator.apply(ipField); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java index 9073d3935852f..e8c05338e7596 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.InsensitiveBinaryComparison; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; +import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerRules.OptimizerRule; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; @@ -251,6 +252,9 @@ public static boolean canPushToSource(Expression exp, Predicate if (usf instanceof RegexMatch || usf instanceof IsNull || usf instanceof IsNotNull) { return isAttributePushable(usf.field(), usf, hasIdenticalDelegate); } + } else if (exp instanceof CIDRMatch cidrMatch) { + return isAttributePushable(cidrMatch.ipField(), cidrMatch, hasIdenticalDelegate) + && Expressions.foldable(cidrMatch.matches()); } return false; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java index 4dd61def0b2c3..c610421890fc4 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.InsensitiveEquals; +import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import org.elasticsearch.xpack.ql.QlIllegalArgumentException; @@ -37,6 +38,7 @@ import org.elasticsearch.xpack.ql.querydsl.query.MatchAll; import org.elasticsearch.xpack.ql.querydsl.query.Query; import org.elasticsearch.xpack.ql.querydsl.query.TermQuery; +import org.elasticsearch.xpack.ql.querydsl.query.TermsQuery; import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; @@ -44,7 +46,9 @@ import java.math.BigDecimal; import java.math.BigInteger; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Set; import java.util.function.Supplier; import static org.elasticsearch.xpack.ql.type.DataTypes.UNSIGNED_LONG; @@ -66,7 +70,7 @@ public final class EsqlTranslatorHandler extends QlTranslatorHandler { new ExpressionTranslators.StringQueries(), new ExpressionTranslators.Matches(), new ExpressionTranslators.MultiMatches(), - new ExpressionTranslators.Scalars() + new Scalars() ); @Override @@ -245,4 +249,26 @@ private static boolean isInRange(DataType numericFieldDataType, DataType valueDa return minValue.compareTo(decimalValue) <= 0 && maxValue.compareTo(decimalValue) >= 0; } } + + public static class Scalars extends ExpressionTranslator { + @Override + protected Query asQuery(ScalarFunction f, TranslatorHandler handler) { + return doTranslate(f, handler); + } + + public static Query doTranslate(ScalarFunction f, TranslatorHandler handler) { + if (f instanceof CIDRMatch cm) { + if (cm.ipField() instanceof FieldAttribute fa && Expressions.foldable(cm.matches())) { + String targetFieldName = handler.nameOf(fa.exactAttribute()); + Set set = new LinkedHashSet<>(Expressions.fold(cm.matches())); + + Query query = new TermsQuery(f.source(), targetFieldName, set); + // CIDR_MATCH applies only to single values. 
+ return handler.wrapFunctionQuery(f, cm.ipField(), () -> query); + } + } + + return ExpressionTranslators.Scalars.doTranslate(f, handler); + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 7321799efd705..7950cf0f1d335 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -9,6 +9,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.QueryBuilder; @@ -54,10 +55,13 @@ import org.elasticsearch.xpack.ql.type.EsField; import org.junit.Before; +import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import static java.util.Arrays.asList; +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; import static org.elasticsearch.xpack.esql.EsqlTestUtils.configuration; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; @@ -415,6 +419,44 @@ public void testIsNullPushdownFilter() { assertThat(query.query().toString(), is(expected.toString())); } + /** + * Expects + * LimitExec[500[INTEGER]] + * \_ExchangeExec[[],false] + * \_ProjectExec[[!alias_integer, boolean{f}#4, byte{f}#5, constant_keyword-foo{f}#6, date{f}#7, double{f}#8, float{f}#9, + * half_float{f}#10, integer{f}#12, ip{f}#13, keyword{f}#14, long{f}#15, scaled_float{f}#11, short{f}#17, text{f}#18, + * unsigned_long{f}#16, version{f}#19, wildcard{f}#20]] + * \_FieldExtractExec[!alias_integer, boolean{f}#4, byte{f}#5, constant_k..][] + * \_EsQueryExec[test], query[{"esql_single_value":{"field":"ip","next":{"terms":{"ip":["127.0.0.0/24"],"boost":1.0}},"source": + * "cidr_match(ip, \"127.0.0.0/24\")@1:19"}}][_doc{f}#21], limit[500], sort[] estimatedRowSize[389] + */ + public void testCidrMatchPushdownFilter() { + var allTypeMappingAnalyzer = makeAnalyzer("mapping-ip.json", new EnrichResolution()); + final String fieldName = "ip_addr"; + + int cidrBlockCount = randomIntBetween(1, 10); + ArrayList cidrBlocks = new ArrayList<>(); + for (int i = 0; i < cidrBlockCount; i++) { + cidrBlocks.add(randomCidrBlock()); + } + String cidrBlocksString = cidrBlocks.stream().map((s) -> "\"" + s + "\"").collect(Collectors.joining(",")); + String cidrMatch = format(null, "cidr_match({}, {})", fieldName, cidrBlocksString); + + var query = "from test | where " + cidrMatch; + var plan = plan(query, EsqlTestUtils.TEST_SEARCH_STATS, allTypeMappingAnalyzer); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + var field = as(project.child(), FieldExtractExec.class); + var queryExec = as(field.child(), EsQueryExec.class); + assertThat(queryExec.limit().fold(), is(500)); + + var expectedInnerQuery = QueryBuilders.termsQuery(fieldName, cidrBlocks); + var expectedQuery = wrapWithSingleQuery(expectedInnerQuery, fieldName, new Source(1, 18, cidrMatch)); + assertThat(queryExec.query().toString(), is(expectedQuery.toString())); + } + private 
record OutOfRangeTestCase(String fieldName, String tooLow, String tooHigh) {}; public void testOutOfRangeFilterPushdown() { @@ -621,4 +663,13 @@ private PhysicalPlan physicalPlan(String query, Analyzer analyzer) { protected List filteredWarnings() { return withDefaultLimitWarning(super.filteredWarnings()); } + + private String randomCidrBlock() { + boolean ipv4 = randomBoolean(); + + String address = NetworkAddress.format(randomIp(ipv4)); + int cidrPrefixLength = ipv4 ? randomIntBetween(0, 32) : randomIntBetween(0, 128); + + return format(null, "{}/{}", address, cidrPrefixLength); + } } From 0c8ceb3d1e61774a0b5bd0abad5c99d4aec8d0ea Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 6 Feb 2024 09:35:22 +0000 Subject: [PATCH 050/106] Fix reference to updated task name --- build-tools-internal/src/main/groovy/elasticsearch.ide.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index 683a2d5604055..b6996b7493f54 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -114,7 +114,7 @@ if (providers.systemProperty('idea.active').getOrNull() == 'true') { description = 'Builds artifacts needed as dependency for IDE modules' dependsOn([':client:rest-high-level:shadowJar', ':plugins:repository-hdfs:hadoop-client-api:shadowJar', - ':libs:elasticsearch-x-content:generateProviderImpl', + ':libs:elasticsearch-x-content:generateImplProviderImpl', ':x-pack:plugin:esql:compute:ann:jar', ':x-pack:plugin:esql:compute:gen:jar', ':server:generateModulesList', From 2c83881a0b918054557a565ae97ddfa605f8a06a Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 6 Feb 2024 10:36:51 +0100 Subject: [PATCH 051/106] [DOCS][ESQL] Document locale rest parameter (#104985) --- docs/reference/esql/esql-query-api.asciidoc | 4 +++ docs/reference/esql/esql-rest.asciidoc | 27 +++++++++++++++++++++ 2 files changed, 31 insertions(+) diff --git a/docs/reference/esql/esql-query-api.asciidoc b/docs/reference/esql/esql-query-api.asciidoc index e1e27be12a36f..d7fa25a5a8d4f 100644 --- a/docs/reference/esql/esql-query-api.asciidoc +++ b/docs/reference/esql/esql-query-api.asciidoc @@ -67,6 +67,10 @@ precedence. `false`. The API only supports this parameter for CBOR, JSON, SMILE, and YAML responses. See <>. +`locale`:: +(Optional, string) Returns results (especially dates) formatted per the conventions of the locale. +For syntax, refer to <>. + `params`:: (Optional, array) Values for parameters in the `query`. For syntax, refer to <>. diff --git a/docs/reference/esql/esql-rest.asciidoc b/docs/reference/esql/esql-rest.asciidoc index d66ceb2eb4f1e..fc06cfea904af 100644 --- a/docs/reference/esql/esql-rest.asciidoc +++ b/docs/reference/esql/esql-rest.asciidoc @@ -204,6 +204,33 @@ Which returns: } ---- +[discrete] +[[esql-locale-param]] +==== Returning localized results + +Use the `locale` parameter in the request body to return results (especially dates) formatted per the conventions of the locale. +If `locale` is not specified, defaults to `en-US` (English). +Refer to https://www.oracle.com/java/technologies/javase/jdk17-suported-locales.html[JDK Supported Locales]. + +Syntax: the `locale` parameter accepts language tags in the (case-insensitive) format `xy` and `xy-XY`. 
+ +For example, to return a month name in French: + +[source,console] +---- +POST /_query +{ + "locale": "fr-FR", + "query": """ + ROW birth_date_string = "2023-01-15T00:00:00.000Z" + | EVAL birth_date = date_parse(birth_date_string) + | EVAL month_of_birth = DATE_FORMAT("MMMM",birth_date) + | LIMIT 5 + """ +} +---- +// TEST[setup:library] + [discrete] [[esql-rest-params]] ==== Passing parameters to a query From b06ac7d6a45bd61ccf455a22fc63ba46a4618ab3 Mon Sep 17 00:00:00 2001 From: Dmitry Cherniachenko <2sabio@gmail.com> Date: Tue, 6 Feb 2024 10:45:22 +0100 Subject: [PATCH 052/106] Really drop symlink env test on Windows (#105134) Rectification of #102762 --- .../test/java/org/elasticsearch/env/NodeEnvironmentTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index 9fd048cd4d2a7..ce0597e7169a4 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -645,8 +645,8 @@ public void testSymlinkDataDirectory() throws Exception { try { Files.createSymbolicLink(symLinkPath, dataPath); } catch (FileSystemException e) { - if (IOUtils.WINDOWS && e.getMessage().equals("A required privilege is not held by the client")) { - throw new AssumptionViolatedException("Symlinks on windows needs admin privileges", e); + if (IOUtils.WINDOWS && "A required privilege is not held by the client".equals(e.getReason())) { + throw new AssumptionViolatedException("Symlinks on Windows need admin privileges", e); } else { throw e; } From 0cec48f50e36713040a11284e9d8705d11280a55 Mon Sep 17 00:00:00 2001 From: Felix Barnsteiner Date: Tue, 6 Feb 2024 11:24:43 +0100 Subject: [PATCH 053/106] TSDB dimensions encoding (#99747) Adds a run-length encoding for TSDB that takes advantage of the fact that doc_values for dimensions are sorted by _tsid, which implies that consecutive values for the same field are mostly the same in a block. 
--- docs/changelog/99747.yaml | 5 + .../index/codec/PerFieldMapperCodec.java | 38 +- .../codec/tsdb/ES87TSDBDocValuesConsumer.java | 365 +++++++++- .../codec/tsdb/ES87TSDBDocValuesEncoder.java | 71 ++ .../codec/tsdb/ES87TSDBDocValuesFormat.java | 8 + .../codec/tsdb/ES87TSDBDocValuesProducer.java | 649 +++++++++++++++++- .../tsdb/ES87TSDBDocValuesEncoderTests.java | 94 +++ .../tsdb/ES87TSDBDocValuesFormatTests.java | 335 +++------ 8 files changed, 1255 insertions(+), 310 deletions(-) create mode 100644 docs/changelog/99747.yaml diff --git a/docs/changelog/99747.yaml b/docs/changelog/99747.yaml new file mode 100644 index 0000000000000..94aefbf25d8e5 --- /dev/null +++ b/docs/changelog/99747.yaml @@ -0,0 +1,5 @@ +pr: 99747 +summary: TSDB dimensions encoding +area: TSDB +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index 852547ecb1073..0e2b50257ae37 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -21,13 +21,14 @@ import org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat; import org.elasticsearch.index.codec.postings.ES812PostingsFormat; import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; +import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.index.mapper.TimeSeriesParams; +import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; /** @@ -110,33 +111,26 @@ public DocValuesFormat getDocValuesFormatForField(String field) { } boolean useTSDBDocValuesFormat(final String field) { - return mapperService.getIndexSettings().isES87TSDBCodecEnabled() - && isTimeSeriesModeIndex() - && isNotSpecialField(field) - && (isCounterOrGaugeMetricType(field) || isTimestampField(field)); - } - - private boolean isTimeSeriesModeIndex() { - return IndexMode.TIME_SERIES.equals(mapperService.getIndexSettings().getMode()); - } - - private boolean isCounterOrGaugeMetricType(String field) { - if (mapperService != null) { + if (mapperService != null && mapperService.getIndexSettings().isES87TSDBCodecEnabled() && isTimeSeriesModeIndex()) { final MappingLookup mappingLookup = mapperService.mappingLookup(); if (mappingLookup.getMapper(field) instanceof NumberFieldMapper) { - final MappedFieldType fieldType = mappingLookup.getFieldType(field); - return TimeSeriesParams.MetricType.COUNTER.equals(fieldType.getMetricType()) - || TimeSeriesParams.MetricType.GAUGE.equals(fieldType.getMetricType()); + return true; + } + if (mappingLookup.getMapper(field) instanceof DateFieldMapper) { + return true; + } + if (mappingLookup.getMapper(field) instanceof KeywordFieldMapper) { + return true; + } + if (mappingLookup.getMapper(field) instanceof TimeSeriesIdFieldMapper) { + return true; } } return false; } - private static boolean isTimestampField(String field) { - return "@timestamp".equals(field); + private boolean isTimeSeriesModeIndex() { + return 
IndexMode.TIME_SERIES == mapperService.getIndexSettings().getMode(); } - private static boolean isNotSpecialField(String field) { - return field.startsWith("_") == false; - } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java index 17bdcbbfb0739..15dc386f41284 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java @@ -16,25 +16,43 @@ import org.apache.lucene.index.EmptyDocValuesProducer; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.SortedSetSelector; +import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.store.ByteBuffersDataOutput; import org.apache.lucene.store.ByteBuffersIndexOutput; import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.LongsRef; +import org.apache.lucene.util.StringHelper; +import org.apache.lucene.util.compress.LZ4; import org.apache.lucene.util.packed.DirectMonotonicWriter; +import org.apache.lucene.util.packed.PackedInts; import org.elasticsearch.core.IOUtils; import java.io.IOException; import java.util.Arrays; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SORTED_SET; + final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { IndexOutput data, meta; final int maxDoc; + private byte[] termsDictBuffer; ES87TSDBDocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException { + this.termsDictBuffer = new byte[1 << 14]; boolean success = false; try { final String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension); @@ -68,15 +86,15 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { public void addNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { meta.writeInt(field.number); meta.writeByte(ES87TSDBDocValuesFormat.NUMERIC); - writeNumericField(field, new EmptyDocValuesProducer() { + writeField(field, new EmptyDocValuesProducer() { @Override public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { return DocValues.singleton(valuesProducer.getNumeric(field)); } - }); + }, -1); } - private long[] writeNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + private long[] writeField(FieldInfo field, DocValuesProducer valuesProducer, long maxOrd) throws IOException { int numDocsWithValue = 0; long numValues = 0; @@ -109,7 +127,8 @@ private long[] writeNumericField(FieldInfo field, DocValuesProducer valuesProduc meta.writeLong(numValues); if (numValues > 0) { - meta.writeInt(ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT); + // Special 
case for maxOrd of 1, signal -1 that no blocks will be written + meta.writeInt(maxOrd != 1 ? ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT : -1); final ByteBuffersDataOutput indexOut = new ByteBuffersDataOutput(); final DirectMonotonicWriter indexWriter = DirectMonotonicWriter.getInstance( meta, @@ -118,32 +137,46 @@ private long[] writeNumericField(FieldInfo field, DocValuesProducer valuesProduc ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT ); - final long[] buffer = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; - int bufferSize = 0; final long valuesDataOffset = data.getFilePointer(); - final ES87TSDBDocValuesEncoder encoder = new ES87TSDBDocValuesEncoder(); - - values = valuesProducer.getSortedNumeric(field); - for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { - final int count = values.docValueCount(); - for (int i = 0; i < count; ++i) { - buffer[bufferSize++] = values.nextValue(); - if (bufferSize == ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE) { - indexWriter.add(data.getFilePointer() - valuesDataOffset); + // Special case for maxOrd of 1, skip writing the blocks + if (maxOrd != 1) { + final long[] buffer = new long[ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE]; + int bufferSize = 0; + final ES87TSDBDocValuesEncoder encoder = new ES87TSDBDocValuesEncoder(); + values = valuesProducer.getSortedNumeric(field); + final int bitsPerOrd = maxOrd >= 0 ? PackedInts.bitsRequired(maxOrd - 1) : -1; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + final int count = values.docValueCount(); + for (int i = 0; i < count; ++i) { + buffer[bufferSize++] = values.nextValue(); + if (bufferSize == ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE) { + indexWriter.add(data.getFilePointer() - valuesDataOffset); + if (maxOrd >= 0) { + encoder.encodeOrdinals(buffer, data, bitsPerOrd); + } else { + encoder.encode(buffer, data); + } + bufferSize = 0; + } + } + } + if (bufferSize > 0) { + indexWriter.add(data.getFilePointer() - valuesDataOffset); + // Fill unused slots in the block with zeroes rather than junk + Arrays.fill(buffer, bufferSize, ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE, 0L); + if (maxOrd >= 0) { + encoder.encodeOrdinals(buffer, data, bitsPerOrd); + } else { encoder.encode(buffer, data); - bufferSize = 0; } } } - if (bufferSize > 0) { - indexWriter.add(data.getFilePointer() - valuesDataOffset); - // Fill unused slots in the block with zeroes rather than junk - Arrays.fill(buffer, bufferSize, ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE, 0L); - encoder.encode(buffer, data); - } final long valuesDataLength = data.getFilePointer() - valuesDataOffset; - indexWriter.finish(); + if (maxOrd != 1) { + // Special case for maxOrd of 1, indexWriter isn't really used, so no need to invoke finish() method. 
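+        // (Annotation, not part of the original patch — the maxOrd convention threaded through writeField:
+        //    maxOrd == -1 : plain numeric values, encoded via the existing encode() path
+        //    maxOrd ==  1 : a single unique ordinal; no value blocks are written, and the meta records
+        //                   -1 in place of the direct-monotonic block shift
+        //    maxOrd  >  1 : ordinal values, bit-packed via encodeOrdinals() using
+        //                   PackedInts.bitsRequired(maxOrd - 1) bits per ordinal)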
+ indexWriter.finish(); + } final long indexDataOffset = data.getFilePointer(); data.copyBytes(indexOut.toDataInput(), indexOut.size()); meta.writeLong(indexDataOffset); @@ -163,18 +196,205 @@ public void addBinaryField(FieldInfo field, DocValuesProducer valuesProducer) th @Override public void addSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { - throw new UnsupportedOperationException("Unsupported sorted doc values for field [" + field.name + "]"); + meta.writeInt(field.number); + meta.writeByte(ES87TSDBDocValuesFormat.SORTED); + doAddSortedField(field, valuesProducer); + } + + private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + SortedDocValues sorted = valuesProducer.getSorted(field); + int maxOrd = sorted.getValueCount(); + writeField(field, new EmptyDocValuesProducer() { + @Override + public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { + SortedDocValues sorted = valuesProducer.getSorted(field); + NumericDocValues sortedOrds = new NumericDocValues() { + @Override + public long longValue() throws IOException { + return sorted.ordValue(); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return sorted.advanceExact(target); + } + + @Override + public int docID() { + return sorted.docID(); + } + + @Override + public int nextDoc() throws IOException { + return sorted.nextDoc(); + } + + @Override + public int advance(int target) throws IOException { + return sorted.advance(target); + } + + @Override + public long cost() { + return sorted.cost(); + } + }; + return DocValues.singleton(sortedOrds); + } + }, maxOrd); + addTermsDict(DocValues.singleton(valuesProducer.getSorted(field))); + } + + private void addTermsDict(SortedSetDocValues values) throws IOException { + final long size = values.getValueCount(); + meta.writeVLong(size); + + int blockMask = ES87TSDBDocValuesFormat.TERMS_DICT_BLOCK_LZ4_MASK; + int shift = ES87TSDBDocValuesFormat.TERMS_DICT_BLOCK_LZ4_SHIFT; + + meta.writeInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + ByteBuffersDataOutput addressBuffer = new ByteBuffersDataOutput(); + ByteBuffersIndexOutput addressOutput = new ByteBuffersIndexOutput(addressBuffer, "temp", "temp"); + long numBlocks = (size + blockMask) >>> shift; + DirectMonotonicWriter writer = DirectMonotonicWriter.getInstance(meta, addressOutput, numBlocks, DIRECT_MONOTONIC_BLOCK_SHIFT); + + BytesRefBuilder previous = new BytesRefBuilder(); + long ord = 0; + long start = data.getFilePointer(); + int maxLength = 0, maxBlockLength = 0; + TermsEnum iterator = values.termsEnum(); + + LZ4.FastCompressionHashTable ht = new LZ4.FastCompressionHashTable(); + ByteArrayDataOutput bufferedOutput = new ByteArrayDataOutput(termsDictBuffer); + int dictLength = 0; + + for (BytesRef term = iterator.next(); term != null; term = iterator.next()) { + if ((ord & blockMask) == 0) { + if (ord != 0) { + // flush the previous block + final int uncompressedLength = compressAndGetTermsDictBlockLength(bufferedOutput, dictLength, ht); + maxBlockLength = Math.max(maxBlockLength, uncompressedLength); + bufferedOutput.reset(termsDictBuffer); + } + + writer.add(data.getFilePointer() - start); + // Write the first term both to the index output, and to the buffer where we'll use it as a + // dictionary for compression + data.writeVInt(term.length); + data.writeBytes(term.bytes, term.offset, term.length); + bufferedOutput = maybeGrowBuffer(bufferedOutput, term.length); + bufferedOutput.writeBytes(term.bytes, 
term.offset, term.length); + dictLength = term.length; + } else { + final int prefixLength = StringHelper.bytesDifference(previous.get(), term); + final int suffixLength = term.length - prefixLength; + assert suffixLength > 0; // terms are unique + // Will write (suffixLength + 1 byte + 2 vint) bytes. Grow the buffer in need. + bufferedOutput = maybeGrowBuffer(bufferedOutput, suffixLength + 11); + bufferedOutput.writeByte((byte) (Math.min(prefixLength, 15) | (Math.min(15, suffixLength - 1) << 4))); + if (prefixLength >= 15) { + bufferedOutput.writeVInt(prefixLength - 15); + } + if (suffixLength >= 16) { + bufferedOutput.writeVInt(suffixLength - 16); + } + bufferedOutput.writeBytes(term.bytes, term.offset + prefixLength, suffixLength); + } + maxLength = Math.max(maxLength, term.length); + previous.copyBytes(term); + ++ord; + } + // Compress and write out the last block + if (bufferedOutput.getPosition() > dictLength) { + final int uncompressedLength = compressAndGetTermsDictBlockLength(bufferedOutput, dictLength, ht); + maxBlockLength = Math.max(maxBlockLength, uncompressedLength); + } + + writer.finish(); + meta.writeInt(maxLength); + // Write one more int for storing max block length. + meta.writeInt(maxBlockLength); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + start = data.getFilePointer(); + addressBuffer.copyTo(data); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + + // Now write the reverse terms index + writeTermsIndex(values); + } + + private int compressAndGetTermsDictBlockLength(ByteArrayDataOutput bufferedOutput, int dictLength, LZ4.FastCompressionHashTable ht) + throws IOException { + int uncompressedLength = bufferedOutput.getPosition() - dictLength; + data.writeVInt(uncompressedLength); + LZ4.compressWithDictionary(termsDictBuffer, 0, dictLength, uncompressedLength, data, ht); + return uncompressedLength; + } + + private ByteArrayDataOutput maybeGrowBuffer(ByteArrayDataOutput bufferedOutput, int termLength) { + int pos = bufferedOutput.getPosition(), originalLength = termsDictBuffer.length; + if (pos + termLength >= originalLength - 1) { + termsDictBuffer = ArrayUtil.grow(termsDictBuffer, originalLength + termLength); + bufferedOutput = new ByteArrayDataOutput(termsDictBuffer, pos, termsDictBuffer.length - pos); + } + return bufferedOutput; + } + + private void writeTermsIndex(SortedSetDocValues values) throws IOException { + final long size = values.getValueCount(); + meta.writeInt(ES87TSDBDocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT); + long start = data.getFilePointer(); + + long numBlocks = 1L + ((size + ES87TSDBDocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) + >>> ES87TSDBDocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT); + ByteBuffersDataOutput addressBuffer = new ByteBuffersDataOutput(); + DirectMonotonicWriter writer; + try (ByteBuffersIndexOutput addressOutput = new ByteBuffersIndexOutput(addressBuffer, "temp", "temp")) { + writer = DirectMonotonicWriter.getInstance(meta, addressOutput, numBlocks, DIRECT_MONOTONIC_BLOCK_SHIFT); + TermsEnum iterator = values.termsEnum(); + BytesRefBuilder previous = new BytesRefBuilder(); + long offset = 0; + long ord = 0; + for (BytesRef term = iterator.next(); term != null; term = iterator.next()) { + if ((ord & ES87TSDBDocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) == 0) { + writer.add(offset); + final int sortKeyLength; + if (ord == 0) { + // no previous term: no bytes to write + sortKeyLength = 0; + } else { + sortKeyLength = StringHelper.sortKeyLength(previous.get(), 
term); + } + offset += sortKeyLength; + data.writeBytes(term.bytes, term.offset, sortKeyLength); + } else if ((ord + & ES87TSDBDocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) == ES87TSDBDocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) { + previous.copyBytes(term); + } + ++ord; + } + writer.add(offset); + writer.finish(); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + start = data.getFilePointer(); + addressBuffer.copyTo(data); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + } } @Override public void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { meta.writeInt(field.number); meta.writeByte(ES87TSDBDocValuesFormat.SORTED_NUMERIC); - writeSortedNumericField(field, valuesProducer); + writeSortedNumericField(field, valuesProducer, -1); } - private void writeSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { - long[] stats = writeNumericField(field, valuesProducer); + private void writeSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer, long maxOrd) throws IOException { + long[] stats = writeField(field, valuesProducer, maxOrd); int numDocsWithField = Math.toIntExact(stats[0]); long numValues = stats[1]; assert numValues >= numDocsWithField; @@ -203,9 +423,98 @@ private void writeSortedNumericField(FieldInfo field, DocValuesProducer valuesPr } } + private static boolean isSingleValued(SortedSetDocValues values) throws IOException { + if (DocValues.unwrapSingleton(values) != null) { + return true; + } + + assert values.docID() == -1; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + int docValueCount = values.docValueCount(); + assert docValueCount > 0; + if (docValueCount > 1) { + return false; + } + } + return true; + } + @Override public void addSortedSetField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { - throw new UnsupportedOperationException("Unsupported sorted set doc values for field [" + field.name + "]"); + meta.writeInt(field.number); + meta.writeByte(SORTED_SET); + + if (isSingleValued(valuesProducer.getSortedSet(field))) { + meta.writeByte((byte) 0); // multiValued (0 = singleValued) + doAddSortedField(field, new EmptyDocValuesProducer() { + @Override + public SortedDocValues getSorted(FieldInfo field) throws IOException { + return SortedSetSelector.wrap(valuesProducer.getSortedSet(field), SortedSetSelector.Type.MIN); + } + }); + return; + } + meta.writeByte((byte) 1); // multiValued (1 = multiValued) + + SortedSetDocValues values = valuesProducer.getSortedSet(field); + long maxOrd = values.getValueCount(); + writeSortedNumericField(field, new EmptyDocValuesProducer() { + @Override + public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { + SortedSetDocValues values = valuesProducer.getSortedSet(field); + return new SortedNumericDocValues() { + + long[] ords = LongsRef.EMPTY_LONGS; + int i, docValueCount; + + @Override + public long nextValue() throws IOException { + return ords[i++]; + } + + @Override + public int docValueCount() { + return docValueCount; + } + + @Override + public boolean advanceExact(int target) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int docID() { + return values.docID(); + } + + @Override + public int nextDoc() throws IOException { + int doc = values.nextDoc(); + if (doc != NO_MORE_DOCS) { + docValueCount = values.docValueCount(); + ords = 
ArrayUtil.grow(ords, docValueCount); + for (int j = 0; j < docValueCount; j++) { + ords[j] = values.nextOrd(); + } + i = 0; + } + return doc; + } + + @Override + public int advance(int target) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public long cost() { + return values.cost(); + } + }; + } + }, maxOrd); + + addTermsDict(valuesProducer.getSortedSet(field)); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoder.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoder.java index e3877e65581f2..f293eb86141b6 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoder.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoder.java @@ -178,6 +178,77 @@ void encode(long[] in, DataOutput out) throws IOException { deltaEncode(0, 0, in, out); } + /** + * Optimizes for encoding sorted fields where we expect a block to mostly either be the same value + * or to make a transition from one value to a second one. + *
+     * <p>
+     * Encodes blocks in the following format:
+     * <ul>
+     *     <li>byte 0: 1/2 bits header + 6/7 bits data</li>
+     *     <li>byte 1..n: data</li>
+     * </ul>
+     * The header (first 1 or 2 bits) describes how the data is encoded:
+     * <ul>
+     *     <li>?0: the block has a single value (vlong); the 2nd bit already contains data</li>
+     *     <li>01: the block has two runs; data contains value 1 (vlong), the run-length (vint) of value 1,
+     *     and the delta from the first to the second value (zlong)</li>
+     *     <li>11: the block is bit-packed</li>
+     * </ul>
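+     * <p>
+     * Worked example (annotation, not part of the original patch): a block consisting entirely of
+     * ordinal 5 is written as the single vlong {@code 5 << 1} (one byte, {@code 0x0A}); a block with
+     * 100 occurrences of ordinal 2 followed by occurrences of ordinal 7 is written as
+     * vlong {@code (2 << 2) | 0b01}, vint {@code 100}, and zlong {@code 7 - 2} (three bytes in total).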
+ */ + void encodeOrdinals(long[] in, DataOutput out, int bitsPerOrd) throws IOException { + assert in.length == ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE; + int numRuns = 1; + for (int i = 1; i < in.length; ++i) { + if (in[i - 1] != in[i]) { + numRuns++; + } + } + if (numRuns == 1 && bitsPerOrd < 63) { + long value = in[0]; + // set first bit to 0 to indicate the block has a single run + out.writeVLong(value << 1); + } else if (numRuns == 2 && bitsPerOrd < 62) { + // set first two bits to 01 to indicate the block has two runs + out.writeVLong((in[0] << 2) | 0b01); + int firstRunLen = in.length; + for (int i = 1; i < in.length; ++i) { + if (in[i] != in[0]) { + firstRunLen = i; + break; + } + } + out.writeVInt(firstRunLen); + out.writeZLong(in[in.length - 1] - in[0]); + } else { + // set first two bits to 11 to indicate the block is bit-packed + out.writeVLong(0b11); + forUtil.encode(in, bitsPerOrd, out); + } + } + + void decodeOrdinals(DataInput in, long[] out, int bitsPerOrd) throws IOException { + assert out.length == ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE : out.length; + + long v1 = in.readVLong(); + int header = (int) (v1 & 0b11L); + if (header == 0b00 || header == 0b10) { + // first bit is zero -> single run + Arrays.fill(out, v1 >>> 1); + } else if (header == 0b01) { + // first two bits are 01 -> two runs + v1 = v1 >>> 2; + int runLen = in.readVInt(); + long v2 = v1 + in.readZLong(); + Arrays.fill(out, 0, runLen, v1); + Arrays.fill(out, runLen, out.length, v2); + } else { + // first two bits are 11 -> bit-packed + forUtil.decode(bitsPerOrd, in, out); + } + } + /** Decode longs that have been encoded with {@link #encode}. */ void decode(DataInput in, long[] out) throws IOException { assert out.length == ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SIZE : out.length; diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java index d8b2ea8b677b8..c5f597f27eb98 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java @@ -34,6 +34,14 @@ public class ES87TSDBDocValuesFormat extends org.apache.lucene.codecs.DocValuesF static final byte SORTED_SET = 3; static final byte SORTED_NUMERIC = 4; + static final int TERMS_DICT_BLOCK_LZ4_SHIFT = 6; + static final int TERMS_DICT_BLOCK_LZ4_SIZE = 1 << TERMS_DICT_BLOCK_LZ4_SHIFT; + static final int TERMS_DICT_BLOCK_LZ4_MASK = TERMS_DICT_BLOCK_LZ4_SIZE - 1; + + static final int TERMS_DICT_REVERSE_INDEX_SHIFT = 10; + static final int TERMS_DICT_REVERSE_INDEX_SIZE = 1 << TERMS_DICT_REVERSE_INDEX_SHIFT; + static final int TERMS_DICT_REVERSE_INDEX_MASK = TERMS_DICT_REVERSE_INDEX_SIZE - 1; + public ES87TSDBDocValuesFormat() { super(CODEC_NAME); } diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index 4c691d84e2b4d..a06227351473a 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -11,30 +11,43 @@ import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.lucene90.IndexedDISI; +import org.apache.lucene.index.BaseTermsEnum; import 
org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.ImpactsEnum; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.DataInput; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.RandomAccessInput; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongValues; +import org.apache.lucene.util.compress.LZ4; import org.apache.lucene.util.packed.DirectMonotonicReader; +import org.apache.lucene.util.packed.PackedInts; import org.elasticsearch.core.IOUtils; import java.io.IOException; import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.TERMS_DICT_BLOCK_LZ4_SHIFT; + public class ES87TSDBDocValuesProducer extends DocValuesProducer { private final Map numerics = new HashMap<>(); + private final Map sorted = new HashMap<>(); + private final Map sortedSets = new HashMap<>(); private final Map sortedNumerics = new HashMap<>(); private final IndexInput data; private final int maxDoc; @@ -101,7 +114,7 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { @Override public NumericDocValues getNumeric(FieldInfo field) throws IOException { NumericEntry entry = numerics.get(field.name); - return getNumeric(entry); + return getNumeric(entry, -1); } @Override @@ -111,18 +124,433 @@ public BinaryDocValues getBinary(FieldInfo field) throws IOException { @Override public SortedDocValues getSorted(FieldInfo field) throws IOException { - throw new UnsupportedOperationException("Unsupported sorted doc values for field [" + field.name + "]"); + SortedEntry entry = sorted.get(field.name); + return getSorted(entry); + } + + private SortedDocValues getSorted(SortedEntry entry) throws IOException { + final NumericDocValues ords = getNumeric(entry.ordsEntry, entry.termsDictEntry.termsDictSize); + return new BaseSortedDocValues(entry) { + + @Override + public int ordValue() throws IOException { + return (int) ords.longValue(); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return ords.advanceExact(target); + } + + @Override + public int docID() { + return ords.docID(); + } + + @Override + public int nextDoc() throws IOException { + return ords.nextDoc(); + } + + @Override + public int advance(int target) throws IOException { + return ords.advance(target); + } + + @Override + public long cost() { + return ords.cost(); + } + }; + } + + private abstract class BaseSortedDocValues extends SortedDocValues { + + final SortedEntry entry; + final TermsEnum termsEnum; + + BaseSortedDocValues(SortedEntry entry) throws IOException { + this.entry = entry; + this.termsEnum = termsEnum(); + } + + @Override + public int getValueCount() { + return Math.toIntExact(entry.termsDictEntry.termsDictSize); + } + + @Override + public BytesRef lookupOrd(int ord) throws IOException { + termsEnum.seekExact(ord); + return 
termsEnum.term(); + } + + @Override + public int lookupTerm(BytesRef key) throws IOException { + TermsEnum.SeekStatus status = termsEnum.seekCeil(key); + switch (status) { + case FOUND: + return Math.toIntExact(termsEnum.ord()); + case NOT_FOUND: + case END: + default: + return Math.toIntExact(-1L - termsEnum.ord()); + } + } + + @Override + public TermsEnum termsEnum() throws IOException { + return new TermsDict(entry.termsDictEntry, data); + } + } + + private abstract class BaseSortedSetDocValues extends SortedSetDocValues { + + final SortedSetEntry entry; + final IndexInput data; + final TermsEnum termsEnum; + + BaseSortedSetDocValues(SortedSetEntry entry, IndexInput data) throws IOException { + this.entry = entry; + this.data = data; + this.termsEnum = termsEnum(); + } + + @Override + public long getValueCount() { + return entry.termsDictEntry.termsDictSize; + } + + @Override + public BytesRef lookupOrd(long ord) throws IOException { + termsEnum.seekExact(ord); + return termsEnum.term(); + } + + @Override + public long lookupTerm(BytesRef key) throws IOException { + TermsEnum.SeekStatus status = termsEnum.seekCeil(key); + switch (status) { + case FOUND: + return termsEnum.ord(); + case NOT_FOUND: + case END: + default: + return -1L - termsEnum.ord(); + } + } + + @Override + public TermsEnum termsEnum() throws IOException { + return new TermsDict(entry.termsDictEntry, data); + } + } + + private class TermsDict extends BaseTermsEnum { + static final int LZ4_DECOMPRESSOR_PADDING = 7; + + final TermsDictEntry entry; + final LongValues blockAddresses; + final IndexInput bytes; + final long blockMask; + final LongValues indexAddresses; + final IndexInput indexBytes; + final BytesRef term; + long ord = -1; + + BytesRef blockBuffer = null; + ByteArrayDataInput blockInput = null; + long currentCompressedBlockStart = -1; + long currentCompressedBlockEnd = -1; + + TermsDict(TermsDictEntry entry, IndexInput data) throws IOException { + this.entry = entry; + RandomAccessInput addressesSlice = data.randomAccessSlice(entry.termsAddressesOffset, entry.termsAddressesLength); + blockAddresses = DirectMonotonicReader.getInstance(entry.termsAddressesMeta, addressesSlice); + bytes = data.slice("terms", entry.termsDataOffset, entry.termsDataLength); + blockMask = (1L << TERMS_DICT_BLOCK_LZ4_SHIFT) - 1; + RandomAccessInput indexAddressesSlice = data.randomAccessSlice( + entry.termsIndexAddressesOffset, + entry.termsIndexAddressesLength + ); + indexAddresses = DirectMonotonicReader.getInstance(entry.termsIndexAddressesMeta, indexAddressesSlice); + indexBytes = data.slice("terms-index", entry.termsIndexOffset, entry.termsIndexLength); + term = new BytesRef(entry.maxTermLength); + + // add the max term length for the dictionary + // add 7 padding bytes can help decompression run faster. 
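+            // (Annotation, not part of the original patch: the buffer must fit the block's first term,
+            // which is kept uncompressed and reused as the LZ4 dictionary, plus the largest block once
+            // decompressed; the extra padding presumably lets the LZ4 fast path copy a few bytes past
+            // the logical end without bounds checks.)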
+ int bufferSize = entry.maxBlockLength + entry.maxTermLength + LZ4_DECOMPRESSOR_PADDING; + blockBuffer = new BytesRef(new byte[bufferSize], 0, bufferSize); + } + + @Override + public BytesRef next() throws IOException { + if (++ord >= entry.termsDictSize) { + return null; + } + + if ((ord & blockMask) == 0L) { + decompressBlock(); + } else { + DataInput input = blockInput; + final int token = Byte.toUnsignedInt(input.readByte()); + int prefixLength = token & 0x0F; + int suffixLength = 1 + (token >>> 4); + if (prefixLength == 15) { + prefixLength += input.readVInt(); + } + if (suffixLength == 16) { + suffixLength += input.readVInt(); + } + term.length = prefixLength + suffixLength; + input.readBytes(term.bytes, prefixLength, suffixLength); + } + return term; + } + + @Override + public void seekExact(long ord) throws IOException { + if (ord < 0 || ord >= entry.termsDictSize) { + throw new IndexOutOfBoundsException(); + } + // Signed shift since ord is -1 when the terms enum is not positioned + final long currentBlockIndex = this.ord >> TERMS_DICT_BLOCK_LZ4_SHIFT; + final long blockIndex = ord >> TERMS_DICT_BLOCK_LZ4_SHIFT; + if (ord < this.ord || blockIndex != currentBlockIndex) { + // The looked up ord is before the current ord or belongs to a different block, seek again + final long blockAddress = blockAddresses.get(blockIndex); + bytes.seek(blockAddress); + this.ord = (blockIndex << TERMS_DICT_BLOCK_LZ4_SHIFT) - 1; + } + // Scan to the looked up ord + while (this.ord < ord) { + next(); + } + } + + private BytesRef getTermFromIndex(long index) throws IOException { + assert index >= 0 && index <= (entry.termsDictSize - 1) >>> entry.termsDictIndexShift; + final long start = indexAddresses.get(index); + term.length = (int) (indexAddresses.get(index + 1) - start); + indexBytes.seek(start); + indexBytes.readBytes(term.bytes, 0, term.length); + return term; + } + + private long seekTermsIndex(BytesRef text) throws IOException { + long lo = 0L; + long hi = (entry.termsDictSize - 1) >> entry.termsDictIndexShift; + while (lo <= hi) { + final long mid = (lo + hi) >>> 1; + getTermFromIndex(mid); + final int cmp = term.compareTo(text); + if (cmp <= 0) { + lo = mid + 1; + } else { + hi = mid - 1; + } + } + + assert hi < 0 || getTermFromIndex(hi).compareTo(text) <= 0; + assert hi == ((entry.termsDictSize - 1) >> entry.termsDictIndexShift) || getTermFromIndex(hi + 1).compareTo(text) > 0; + + return hi; + } + + private BytesRef getFirstTermFromBlock(long block) throws IOException { + assert block >= 0 && block <= (entry.termsDictSize - 1) >>> TERMS_DICT_BLOCK_LZ4_SHIFT; + final long blockAddress = blockAddresses.get(block); + bytes.seek(blockAddress); + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + return term; + } + + private long seekBlock(BytesRef text) throws IOException { + long index = seekTermsIndex(text); + if (index == -1L) { + return -1L; + } + + long ordLo = index << entry.termsDictIndexShift; + long ordHi = Math.min(entry.termsDictSize, ordLo + (1L << entry.termsDictIndexShift)) - 1L; + + long blockLo = ordLo >>> TERMS_DICT_BLOCK_LZ4_SHIFT; + long blockHi = ordHi >>> TERMS_DICT_BLOCK_LZ4_SHIFT; + + while (blockLo <= blockHi) { + final long blockMid = (blockLo + blockHi) >>> 1; + getFirstTermFromBlock(blockMid); + final int cmp = term.compareTo(text); + if (cmp <= 0) { + blockLo = blockMid + 1; + } else { + blockHi = blockMid - 1; + } + } + + assert blockHi < 0 || getFirstTermFromBlock(blockHi).compareTo(text) <= 0; + assert blockHi == ((entry.termsDictSize - 
1) >>> TERMS_DICT_BLOCK_LZ4_SHIFT) + || getFirstTermFromBlock(blockHi + 1).compareTo(text) > 0; + + return blockHi; + } + + @Override + public SeekStatus seekCeil(BytesRef text) throws IOException { + final long block = seekBlock(text); + if (block == -1) { + // before the first term, or empty terms dict + if (entry.termsDictSize == 0) { + ord = 0; + return SeekStatus.END; + } else { + seekExact(0L); + return SeekStatus.NOT_FOUND; + } + } + final long blockAddress = blockAddresses.get(block); + this.ord = block << TERMS_DICT_BLOCK_LZ4_SHIFT; + bytes.seek(blockAddress); + decompressBlock(); + + while (true) { + int cmp = term.compareTo(text); + if (cmp == 0) { + return SeekStatus.FOUND; + } else if (cmp > 0) { + return SeekStatus.NOT_FOUND; + } + if (next() == null) { + return SeekStatus.END; + } + } + } + + private void decompressBlock() throws IOException { + // The first term is kept uncompressed, so no need to decompress block if only + // look up the first term when doing seek block. + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + long offset = bytes.getFilePointer(); + if (offset < entry.termsDataLength - 1) { + // Avoid decompress again if we are reading a same block. + if (currentCompressedBlockStart != offset) { + blockBuffer.offset = term.length; + blockBuffer.length = bytes.readVInt(); + // Decompress the remaining of current block, using the first term as a dictionary + System.arraycopy(term.bytes, 0, blockBuffer.bytes, 0, blockBuffer.offset); + LZ4.decompress(bytes, blockBuffer.length, blockBuffer.bytes, blockBuffer.offset); + currentCompressedBlockStart = offset; + currentCompressedBlockEnd = bytes.getFilePointer(); + } else { + // Skip decompression but need to re-seek to block end. + bytes.seek(currentCompressedBlockEnd); + } + + // Reset the buffer. 
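+                // (Annotation, not part of the original patch: currentCompressedBlockStart/End cache the
+                // on-disk bounds of the block currently held in blockBuffer, so a seek that lands in the
+                // same block only rebuilds blockInput below instead of running LZ4.decompress again.)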
+ blockInput = new ByteArrayDataInput(blockBuffer.bytes, blockBuffer.offset, blockBuffer.length); + } + } + + @Override + public BytesRef term() throws IOException { + return term; + } + + @Override + public long ord() throws IOException { + return ord; + } + + @Override + public long totalTermFreq() throws IOException { + return -1L; + } + + @Override + public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public ImpactsEnum impacts(int flags) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int docFreq() throws IOException { + throw new UnsupportedOperationException(); + } } @Override public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { SortedNumericEntry entry = sortedNumerics.get(field.name); - return getSortedNumeric(entry); + return getSortedNumeric(entry, -1); } @Override public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { - throw new UnsupportedOperationException("Unsupported sorted set doc values for field [" + field.name + "]"); + SortedSetEntry entry = sortedSets.get(field.name); + if (entry.singleValueEntry != null) { + return DocValues.singleton(getSorted(entry.singleValueEntry)); + } + + SortedNumericEntry ordsEntry = entry.ordsEntry; + final SortedNumericDocValues ords = getSortedNumeric(ordsEntry, entry.termsDictEntry.termsDictSize); + return new BaseSortedSetDocValues(entry, data) { + + int i = 0; + int count = 0; + boolean set = false; + + @Override + public long nextOrd() throws IOException { + if (set == false) { + set = true; + i = 0; + count = ords.docValueCount(); + } + if (i++ == count) { + return NO_MORE_ORDS; + } + return ords.nextValue(); + } + + @Override + public int docValueCount() { + return ords.docValueCount(); + } + + @Override + public boolean advanceExact(int target) throws IOException { + set = false; + return ords.advanceExact(target); + } + + @Override + public int docID() { + return ords.docID(); + } + + @Override + public int nextDoc() throws IOException { + set = false; + return ords.nextDoc(); + } + + @Override + public int advance(int target) throws IOException { + set = false; + return ords.advance(target); + } + + @Override + public long cost() { + return ords.cost(); + } + }; } @Override @@ -147,9 +575,9 @@ private void readFields(IndexInput meta, FieldInfos infos) throws IOException { } else if (type == ES87TSDBDocValuesFormat.BINARY) { throw new CorruptIndexException("unsupported type: " + type, meta); } else if (type == ES87TSDBDocValuesFormat.SORTED) { - throw new CorruptIndexException("unsupported type: " + type, meta); + sorted.put(info.name, readSorted(meta)); } else if (type == ES87TSDBDocValuesFormat.SORTED_SET) { - throw new CorruptIndexException("unsupported type: " + type, meta); + sortedSets.put(info.name, readSortedSet(meta)); } else if (type == ES87TSDBDocValuesFormat.SORTED_NUMERIC) { sortedNumerics.put(info.name, readSortedNumeric(meta)); } else { @@ -172,11 +600,15 @@ private static void readNumeric(IndexInput meta, NumericEntry entry) throws IOEx entry.numValues = meta.readLong(); if (entry.numValues > 0) { final int indexBlockShift = meta.readInt(); - entry.indexMeta = DirectMonotonicReader.loadMeta( - meta, - 1 + ((entry.numValues - 1) >>> ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SHIFT), - indexBlockShift - ); + // Special case, -1 means there are no blocks, so no need to load the metadata for it + // -1 is written when there the 
cardinality of a field is exactly one. + if (indexBlockShift != -1) { + entry.indexMeta = DirectMonotonicReader.loadMeta( + meta, + 1 + ((entry.numValues - 1) >>> ES87TSDBDocValuesFormat.NUMERIC_BLOCK_SHIFT), + indexBlockShift + ); + } entry.indexOffset = meta.readLong(); entry.indexLength = meta.readLong(); entry.valuesOffset = meta.readLong(); @@ -202,16 +634,152 @@ private static SortedNumericEntry readSortedNumeric(IndexInput meta, SortedNumer return entry; } + private SortedEntry readSorted(IndexInput meta) throws IOException { + SortedEntry entry = new SortedEntry(); + entry.ordsEntry = new NumericEntry(); + readNumeric(meta, entry.ordsEntry); + entry.termsDictEntry = new TermsDictEntry(); + readTermDict(meta, entry.termsDictEntry); + return entry; + } + + private SortedSetEntry readSortedSet(IndexInput meta) throws IOException { + SortedSetEntry entry = new SortedSetEntry(); + byte multiValued = meta.readByte(); + switch (multiValued) { + case 0: // singlevalued + entry.singleValueEntry = readSorted(meta); + return entry; + case 1: // multivalued + break; + default: + throw new CorruptIndexException("Invalid multiValued flag: " + multiValued, meta); + } + entry.ordsEntry = new SortedNumericEntry(); + readSortedNumeric(meta, entry.ordsEntry); + entry.termsDictEntry = new TermsDictEntry(); + readTermDict(meta, entry.termsDictEntry); + return entry; + } + + private static void readTermDict(IndexInput meta, TermsDictEntry entry) throws IOException { + entry.termsDictSize = meta.readVLong(); + final int blockShift = meta.readInt(); + final long addressesSize = (entry.termsDictSize + (1L << TERMS_DICT_BLOCK_LZ4_SHIFT) - 1) >>> TERMS_DICT_BLOCK_LZ4_SHIFT; + entry.termsAddressesMeta = DirectMonotonicReader.loadMeta(meta, addressesSize, blockShift); + entry.maxTermLength = meta.readInt(); + entry.maxBlockLength = meta.readInt(); + entry.termsDataOffset = meta.readLong(); + entry.termsDataLength = meta.readLong(); + entry.termsAddressesOffset = meta.readLong(); + entry.termsAddressesLength = meta.readLong(); + entry.termsDictIndexShift = meta.readInt(); + final long indexSize = (entry.termsDictSize + (1L << entry.termsDictIndexShift) - 1) >>> entry.termsDictIndexShift; + entry.termsIndexAddressesMeta = DirectMonotonicReader.loadMeta(meta, 1 + indexSize, blockShift); + entry.termsIndexOffset = meta.readLong(); + entry.termsIndexLength = meta.readLong(); + entry.termsIndexAddressesOffset = meta.readLong(); + entry.termsIndexAddressesLength = meta.readLong(); + } + private abstract static class NumericValues { abstract long advance(long index) throws IOException; } - private NumericDocValues getNumeric(NumericEntry entry) throws IOException { + private NumericDocValues getNumeric(NumericEntry entry, long maxOrd) throws IOException { if (entry.docsWithFieldOffset == -2) { // empty return DocValues.emptyNumeric(); } + if (maxOrd == 1) { + // Special case for maxOrd 1, no need to read blocks and use ordinal 0 as only value + if (entry.docsWithFieldOffset == -1) { + // Special case when all docs have a value + return new NumericDocValues() { + + private final int maxDoc = ES87TSDBDocValuesProducer.this.maxDoc; + private int doc = -1; + + @Override + public long longValue() { + // Only one ordinal! 
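+                        // (Annotation, not part of the original patch: with a terms dictionary of size 1,
+                        // every document that has the field maps to ordinal 0, so the reader can answer
+                        // without ever touching the values data slice.)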
+ return 0L; + } + + @Override + public int docID() { + return doc; + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) { + doc = target; + return true; + } + + @Override + public long cost() { + return maxDoc; + } + }; + } else { + final IndexedDISI disi = new IndexedDISI( + data, + entry.docsWithFieldOffset, + entry.docsWithFieldLength, + entry.jumpTableEntryCount, + entry.denseRankPower, + entry.numValues + ); + return new NumericDocValues() { + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public long longValue() { + return 0L; + } + }; + } + } + // NOTE: we could make this a bit simpler by reusing #getValues but this // makes things slower. @@ -219,6 +787,7 @@ private NumericDocValues getNumeric(NumericEntry entry) throws IOException { final DirectMonotonicReader indexReader = DirectMonotonicReader.getInstance(entry.indexMeta, indexSlice); final IndexInput valuesData = data.slice("values", entry.valuesOffset, entry.valuesLength); + final int bitsPerOrd = maxOrd >= 0 ? PackedInts.bitsRequired(maxOrd - 1) : -1; if (entry.docsWithFieldOffset == -1) { // dense return new NumericDocValues() { @@ -269,7 +838,11 @@ public long longValue() throws IOException { valuesData.seek(indexReader.get(blockIndex)); } currentBlockIndex = blockIndex; - decoder.decode(valuesData, currentBlock); + if (maxOrd >= 0) { + decoder.decodeOrdinals(valuesData, currentBlock, bitsPerOrd); + } else { + decoder.decode(valuesData, currentBlock); + } } return currentBlock[blockInIndex]; } @@ -325,7 +898,11 @@ public long longValue() throws IOException { valuesData.seek(indexReader.get(blockIndex)); } currentBlockIndex = blockIndex; - decoder.decode(valuesData, currentBlock); + if (maxOrd >= 0) { + decoder.decodeOrdinals(valuesData, currentBlock, bitsPerOrd); + } else { + decoder.decode(valuesData, currentBlock); + } } return currentBlock[blockInIndex]; } @@ -333,12 +910,13 @@ public long longValue() throws IOException { } } - private NumericValues getValues(NumericEntry entry) throws IOException { + private NumericValues getValues(NumericEntry entry, final long maxOrd) throws IOException { assert entry.numValues > 0; final RandomAccessInput indexSlice = data.randomAccessSlice(entry.indexOffset, entry.indexLength); final DirectMonotonicReader indexReader = DirectMonotonicReader.getInstance(entry.indexMeta, indexSlice); final IndexInput valuesData = data.slice("values", entry.valuesOffset, entry.valuesLength); + final int bitsPerOrd = maxOrd >= 0 ? 
PackedInts.bitsRequired(maxOrd - 1) : -1; return new NumericValues() { private final ES87TSDBDocValuesEncoder decoder = new ES87TSDBDocValuesEncoder(); @@ -355,22 +933,26 @@ long advance(long index) throws IOException { valuesData.seek(indexReader.get(blockIndex)); } currentBlockIndex = blockIndex; - decoder.decode(valuesData, currentBlock); + if (bitsPerOrd >= 0) { + decoder.decodeOrdinals(valuesData, currentBlock, bitsPerOrd); + } else { + decoder.decode(valuesData, currentBlock); + } } return currentBlock[blockInIndex]; } }; } - private SortedNumericDocValues getSortedNumeric(SortedNumericEntry entry) throws IOException { + private SortedNumericDocValues getSortedNumeric(SortedNumericEntry entry, long maxOrd) throws IOException { if (entry.numValues == entry.numDocsWithField) { - return DocValues.singleton(getNumeric(entry)); + return DocValues.singleton(getNumeric(entry, maxOrd)); } final RandomAccessInput addressesInput = data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); final LongValues addresses = DirectMonotonicReader.getInstance(entry.addressesMeta, addressesInput); - final NumericValues values = getValues(entry); + final NumericValues values = getValues(entry, maxOrd); if (entry.docsWithFieldOffset == -1) { // dense @@ -514,4 +1096,33 @@ private static class SortedNumericEntry extends NumericEntry { long addressesLength; } + private static class SortedEntry { + NumericEntry ordsEntry; + TermsDictEntry termsDictEntry; + } + + private static class SortedSetEntry { + SortedEntry singleValueEntry; + SortedNumericEntry ordsEntry; + TermsDictEntry termsDictEntry; + } + + private static class TermsDictEntry { + long termsDictSize; + DirectMonotonicReader.Meta termsAddressesMeta; + int maxTermLength; + long termsDataOffset; + long termsDataLength; + long termsAddressesOffset; + long termsAddressesLength; + int termsDictIndexShift; + DirectMonotonicReader.Meta termsIndexAddressesMeta; + long termsIndexOffset; + long termsIndexLength; + long termsIndexAddressesOffset; + long termsIndexAddressesLength; + + int maxBlockLength; + } + } diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoderTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoderTests.java index eb06a03cab434..bf0737ebda47b 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoderTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesEncoderTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.packed.PackedInts; import java.io.IOException; import java.util.Arrays; @@ -189,4 +190,97 @@ private void doTest(long[] arr, long expectedNumBytes) throws IOException { } } } + + public void testEncodeOrdinalsSingleValueSmall() throws IOException { + long[] arr = new long[blockSize]; + Arrays.fill(arr, 63); + final long expectedNumBytes = 1; + + doTestOrdinals(arr, expectedNumBytes); + } + + public void testEncodeOrdinalsSingleValueMedium() throws IOException { + long[] arr = new long[blockSize]; + Arrays.fill(arr, 64); + final long expectedNumBytes = 2; + + doTestOrdinals(arr, expectedNumBytes); + } + + public void testEncodeOrdinalsSingleValueLarge() throws IOException { + long[] arr = new long[blockSize]; + final long expectedNumBytes = 3; + // each byte of a vlong can store 7 bits (first bit is continuation bit) + // first byte 
can only store 6 bits as the first bit is the header + Arrays.fill(arr, (1 << 6 + 7 + 7) - 1); + + doTestOrdinals(arr, expectedNumBytes); + } + + public void testEncodeOrdinalsSingleValueGrande() throws IOException { + long[] arr = new long[blockSize]; + Arrays.fill(arr, Long.MAX_VALUE); + final long expectedNumBytes = 1 + blockSize * Long.BYTES; + + doTestOrdinals(arr, expectedNumBytes); + } + + public void testEncodeOrdinalsTwoValuesSmall() throws IOException { + long[] arr = new long[blockSize]; + Arrays.fill(arr, 63); + arr[0] = 1; + final long expectedNumBytes = 3; + + doTestOrdinals(arr, expectedNumBytes); + } + + public void testEncodeOrdinalsTwoValuesLarge() throws IOException { + long[] arr = new long[blockSize]; + Arrays.fill(arr, Long.MAX_VALUE >> 2); + arr[0] = (Long.MAX_VALUE >> 2) - 1; + final long expectedNumBytes = 11; + + doTestOrdinals(arr, expectedNumBytes); + } + + public void testEncodeOrdinalsTwoValuesGrande() throws IOException { + long[] arr = new long[blockSize]; + Arrays.fill(arr, Long.MAX_VALUE); + arr[0] = Long.MAX_VALUE - 1; + final long expectedNumBytes = 1 + blockSize * Long.BYTES; + + doTestOrdinals(arr, expectedNumBytes); + } + + public void testEncodeOrdinalsNoRepetitions() throws IOException { + long[] arr = new long[blockSize]; + for (int i = 0; i < blockSize; ++i) { + arr[i] = i; + } + doTestOrdinals(arr, 113); + } + + private void doTestOrdinals(long[] arr, long expectedNumBytes) throws IOException { + long maxOrd = 0; + for (long ord : arr) { + maxOrd = Math.max(maxOrd, ord); + } + final int bitsPerOrd = PackedInts.bitsRequired(maxOrd - 1); + final long[] expected = arr.clone(); + try (Directory dir = newDirectory()) { + try (IndexOutput out = dir.createOutput("tests.bin", IOContext.DEFAULT)) { + encoder.encodeOrdinals(arr, out, bitsPerOrd); + assertEquals(expectedNumBytes, out.getFilePointer()); + } + try (IndexInput in = dir.openInput("tests.bin", IOContext.DEFAULT)) { + long[] decoded = new long[blockSize]; + for (int i = 0; i < decoded.length; ++i) { + decoded[i] = random().nextLong(); + } + encoder.decodeOrdinals(in, decoded, bitsPerOrd); + assertEquals(in.length(), in.getFilePointer()); + assertArrayEquals(expected, decoded); + } + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java index 1fe24f2e740ea..8c433173e561a 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java @@ -8,15 +8,30 @@ package org.elasticsearch.index.codec.tsdb; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.codecs.Codec; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.SortedDocValuesField; +import org.apache.lucene.document.SortedSetDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.tests.index.BaseDocValuesFormatTestCase; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.util.TestUtil; +import 
org.apache.lucene.util.BytesRef; import java.io.IOException; -import java.util.function.Supplier; public class ES87TSDBDocValuesFormatTests extends BaseDocValuesFormatTestCase { + private static final int NUM_DOCS = 10; + private final Codec codec = TestUtil.alwaysDocValuesFormat(new ES87TSDBDocValuesFormat()); @Override @@ -24,6 +39,83 @@ protected Codec getCodec() { return codec; } + public void testSortedDocValuesSingleUniqueValue() throws IOException { + try (Directory directory = newDirectory()) { + Analyzer analyzer = new MockAnalyzer(random()); + IndexWriterConfig conf = newIndexWriterConfig(analyzer); + conf.setMergePolicy(newLogMergePolicy()); + try (RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf)) { + for (int i = 0; i < NUM_DOCS; i++) { + Document doc = new Document(); + doc.add(new SortedDocValuesField("field", newBytesRef("value"))); + doc.add(new SortedDocValuesField("field" + i, newBytesRef("value" + i))); + iwriter.addDocument(doc); + } + iwriter.forceMerge(1); + } + try (IndexReader ireader = maybeWrapWithMergingReader(DirectoryReader.open(directory))) { + assert ireader.leaves().size() == 1; + SortedDocValues field = ireader.leaves().get(0).reader().getSortedDocValues("field"); + for (int i = 0; i < NUM_DOCS; i++) { + assertEquals(i, field.nextDoc()); + assertEquals(0, field.ordValue()); + BytesRef scratch = field.lookupOrd(0); + assertEquals("value", scratch.utf8ToString()); + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, field.nextDoc()); + for (int i = 0; i < NUM_DOCS; i++) { + SortedDocValues fieldN = ireader.leaves().get(0).reader().getSortedDocValues("field" + i); + assertEquals(i, fieldN.nextDoc()); + assertEquals(0, fieldN.ordValue()); + BytesRef scratch = fieldN.lookupOrd(0); + assertEquals("value" + i, scratch.utf8ToString()); + assertEquals(DocIdSetIterator.NO_MORE_DOCS, fieldN.nextDoc()); + } + } + } + } + + public void testSortedSetDocValuesSingleUniqueValue() throws IOException { + try (Directory directory = newDirectory()) { + Analyzer analyzer = new MockAnalyzer(random()); + IndexWriterConfig conf = newIndexWriterConfig(analyzer); + conf.setMergePolicy(newLogMergePolicy()); + try (RandomIndexWriter iwriter = new RandomIndexWriter(random(), directory, conf)) { + for (int i = 0; i < NUM_DOCS; i++) { + Document doc = new Document(); + doc.add(new SortedSetDocValuesField("field", newBytesRef("value"))); + doc.add(new SortedSetDocValuesField("field" + i, newBytesRef("value" + i))); + iwriter.addDocument(doc); + } + iwriter.forceMerge(1); + } + + try (IndexReader ireader = maybeWrapWithMergingReader(DirectoryReader.open(directory))) { + assert ireader.leaves().size() == 1; + var field = ireader.leaves().get(0).reader().getSortedSetDocValues("field"); + for (int i = 0; i < NUM_DOCS; i++) { + assertEquals(i, field.nextDoc()); + assertEquals(1, field.docValueCount()); + assertEquals(0, field.nextOrd()); + BytesRef scratch = field.lookupOrd(0); + assertEquals("value", scratch.utf8ToString()); + assertEquals(SortedSetDocValues.NO_MORE_ORDS, field.nextOrd()); + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, field.nextDoc()); + for (int i = 0; i < NUM_DOCS; i++) { + var fieldN = ireader.leaves().get(0).reader().getSortedSetDocValues("field" + i); + assertEquals(i, fieldN.nextDoc()); + assertEquals(1, fieldN.docValueCount()); + assertEquals(0, fieldN.nextOrd()); + BytesRef scratch = fieldN.lookupOrd(0); + assertEquals("value" + i, scratch.utf8ToString()); + assertEquals(DocIdSetIterator.NO_MORE_DOCS, fieldN.nextDoc()); + 
assertEquals(SortedSetDocValues.NO_MORE_ORDS, fieldN.nextOrd()); + } + } + } + } + // NOTE: here and below we disable tests dealing with non-numeric fields // because ES87TSDBDocValuesFormat only deals with numeric fields. @Override @@ -66,31 +158,6 @@ public void testBytesMergeAwayAllValues() { assumeTrue("doc values format only supports numerics", false); } - @Override - public void testSortedBytes() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedBytesTwoDocuments() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedBytesThreeDocuments() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedBytesTwoDocumentsMerged() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedMergeAwayAllValues() { - assumeTrue("doc values format only supports numerics", false); - } - @Override public void testBytesWithNewline() { assumeTrue("doc values format only supports numerics", false); @@ -101,16 +168,6 @@ public void testMissingSortedBytes() { assumeTrue("doc values format only supports numerics", false); } - @Override - public void testSortedTermsEnum() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testEmptySortedBytes() { - assumeTrue("doc values format only supports numerics", false); - } - @Override public void testEmptyBytes() { assumeTrue("doc values format only supports numerics", false); @@ -136,16 +193,6 @@ public void testCodecUsesOwnSortedBytes() throws IOException { assumeTrue("doc values format only supports numerics", false); } - @Override - public void testDocValuesSimple() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testRandomSortedBytes() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - @Override public void testBinaryFixedLengthVsStoredFields() { assumeTrue("doc values format only supports numerics", false); @@ -171,141 +218,6 @@ public void doTestBinaryVariableLengthVsStoredFields(double density) throws Exce assumeTrue("doc values format only supports numerics", false); } - @Override - protected void doTestSortedVsStoredFields(int numDocs, double density, Supplier bytes) throws Exception { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedFixedLengthVsStoredFields() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSparseSortedFixedLengthVsStoredFields() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedVariableLengthVsStoredFields() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSparseSortedVariableLengthVsStoredFields() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - protected void doTestSortedVsStoredFields(int numDocs, double density, int minLength, int maxLength) throws Exception { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetOneValue() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetTwoFields() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } 
- - @Override - public void testSortedSetTwoDocumentsMerged() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetTwoValues() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetTwoValuesUnordered() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetThreeValuesTwoDocs() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetTwoDocumentsLastMissing() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetTwoDocumentsLastMissingMerge() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetTwoDocumentsFirstMissing() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetTwoDocumentsFirstMissingMerge() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetMergeAwayAllValues() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetTermsEnum() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetFixedLengthVsStoredFields() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetVariableLengthVsStoredFields() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetFixedLengthSingleValuedVsStoredFields() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetVariableLengthSingleValuedVsStoredFields() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetFixedLengthFewUniqueSetsVsStoredFields() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetVariableLengthFewUniqueSetsVsStoredFields() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetVariableLengthManyValuesPerDocVsStoredFields() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetFixedLengthManyValuesPerDocVsStoredFields() { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSparseGCDCompression() { - assumeTrue("doc values format only supports numerics", false); - } - @Override public void testTwoBytesOneMissing() throws IOException { assumeTrue("doc values format only supports numerics", false); @@ -331,61 +243,6 @@ public void testEmptyBinaryValueOnPageSizes() { assumeTrue("doc values format only supports numerics", false); } - @Override - public void testOneSortedNumber() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testOneSortedNumberOneMissing() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testTwoSortedNumber() throws IOException { - assumeTrue("doc values format only supports 
numerics", false); - } - - @Override - public void testTwoSortedNumberSameValue() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testTwoSortedNumberOneMissing() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedNumberMerge() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedNumberMergeAwayAllValues() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedEnumAdvanceIndependently() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetEnumAdvanceIndependently() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedMergeAwayAllValuesLargeSegment() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - - @Override - public void testSortedSetMergeAwayAllValuesLargeSegment() throws IOException { - assumeTrue("doc values format only supports numerics", false); - } - @Override public void testBinaryMergeAwayAllValuesLargeSegment() throws IOException { assumeTrue("doc values format only supports numerics", false); @@ -396,11 +253,6 @@ public void testRandomAdvanceBinary() throws IOException { assumeTrue("doc values format only supports numerics", false); } - @Override - public void testHighOrdsSortedSetDV() { - assumeTrue("doc values format only supports numerics", false); - } - @Override public void testCheckIntegrityReadsAllBytes() { assumeTrue("doc values format only supports numerics", false); @@ -415,4 +267,5 @@ public void testMergeStability() { public void testRandomExceptions() { assumeTrue("doc values format only supports numerics", false); } + } From 31c66c1a8c09032debb64ed8d34d642aeffc5852 Mon Sep 17 00:00:00 2001 From: Simon Cooper Date: Tue, 6 Feb 2024 10:30:59 +0000 Subject: [PATCH 054/106] Add a few examples of using version mapping in error messages (#104751) --- .../main/java/org/elasticsearch/ReleaseVersions.java | 12 +++++++++--- .../elasticsearch/cluster/routing/ShardRouting.java | 2 +- .../decider/NodeVersionAllocationDecider.java | 8 ++++---- .../cluster/version/CompatibilityVersions.java | 8 ++++---- .../org/elasticsearch/snapshots/RestoreService.java | 8 ++++---- .../java/org/elasticsearch/ReleaseVersionsTests.java | 2 +- .../NodeVersionAllocationDeciderTests.java | 8 ++++---- .../cluster/version/CompatibilityVersionsTests.java | 2 +- .../elasticsearch/upgrades/CcrRollingUpgradeIT.java | 4 ++++ 9 files changed, 32 insertions(+), 22 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/ReleaseVersions.java b/server/src/main/java/org/elasticsearch/ReleaseVersions.java index 440603cf10ae5..6e7cf6c428cd6 100644 --- a/server/src/main/java/org/elasticsearch/ReleaseVersions.java +++ b/server/src/main/java/org/elasticsearch/ReleaseVersions.java @@ -9,6 +9,7 @@ package org.elasticsearch; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.internal.BuildExtension; import org.elasticsearch.plugins.ExtensionLoader; @@ -104,9 +105,14 @@ private static IntFunction lookupFunction(NavigableMap versions = ReleaseVersions.generateVersionsLookup(ReleaseVersionsTests.class); assertThat(versions.apply(17), equalTo("8.1.2-8.2.0")); - 
expectThrows(AssertionError.class, () -> versions.apply(9)); + assertThat(versions.apply(9), equalTo("0.0.0-8.0.0")); assertThat(versions.apply(24), equalTo("8.2.2-snapshot[24]")); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 32a1e1d14876f..00602a5f35d76 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -625,9 +625,9 @@ public void testMessages() { decision.getExplanation(), is( "max supported index version [" - + oldNode.node().getMaxIndexVersion() + + oldNode.node().getMaxIndexVersion().toReleaseVersion() + "] is older than the snapshot version [" - + newNode.node().getMaxIndexVersion() + + newNode.node().getMaxIndexVersion().toReleaseVersion() + "]" ) ); @@ -642,9 +642,9 @@ public void testMessages() { decision.getExplanation(), is( "max supported index version [" - + newNode.node().getMaxIndexVersion() + + newNode.node().getMaxIndexVersion().toReleaseVersion() + "] is the same or newer than snapshot version [" - + oldNode.node().getMaxIndexVersion() + + oldNode.node().getMaxIndexVersion().toReleaseVersion() + "]" ) ); diff --git a/server/src/test/java/org/elasticsearch/cluster/version/CompatibilityVersionsTests.java b/server/src/test/java/org/elasticsearch/cluster/version/CompatibilityVersionsTests.java index b3b598f2bd38c..edcf395753281 100644 --- a/server/src/test/java/org/elasticsearch/cluster/version/CompatibilityVersionsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/version/CompatibilityVersionsTests.java @@ -128,7 +128,7 @@ public void testPreventJoinClusterWithUnsupportedTransportVersion() { compatibilityVersions ) ); - assertThat(e.getMessage(), containsString("may not join a cluster with minimum transport version")); + assertThat(e.getMessage(), containsString("may not join a cluster with minimum version")); } public void testPreventJoinClusterWithUnsupportedMappingsVersion() { diff --git a/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java b/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java index 27250dd4e3367..7b51cf762c2c7 100644 --- a/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java @@ -234,6 +234,10 @@ public void testCannotFollowLeaderInUpgradedCluster() throws Exception { containsString("the snapshot was created with index version ["), containsString("] which is higher than the version used by this node [") ), + allOf( + containsString("the snapshot was created with version ["), + containsString("] which is higher than the version of this node [") + ), allOf( containsString("the snapshot was created with Elasticsearch version ["), containsString("] which is higher than the version of this node [") From bc043fff59441c16573c5bf6859fa9392a75a017 Mon Sep 17 00:00:00 2001 From: Chris Hegarty <62058229+ChrisHegarty@users.noreply.github.com> Date: Tue, 6 Feb 2024 10:46:46 +0000 Subject: [PATCH 055/106] Update the MrjarPlugin test task to depend upon the jar (#105008) This commit updates the MrjarPlugin so that test tasks 
depend upon the jar. Tests need the jar since it contains the versioned sections, and the Java launcher requires the jar itself in order to load classes from those versioned sections. --- .../gradle/internal/MrjarPlugin.java | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-)
diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index e7bc7e5b6507c..8c5d671e00fe7 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -11,11 +11,14 @@ import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Plugin; import org.gradle.api.Project; +import org.gradle.api.file.FileCollection; import org.gradle.api.plugins.JavaPlugin; import org.gradle.api.plugins.JavaPluginExtension; import org.gradle.api.tasks.SourceSet; +import org.gradle.api.tasks.SourceSetContainer; import org.gradle.api.tasks.compile.CompileOptions; import org.gradle.api.tasks.compile.JavaCompile; +import org.gradle.api.tasks.testing.Test; import org.gradle.jvm.tasks.Jar; import org.gradle.jvm.toolchain.JavaLanguageVersion; import org.gradle.jvm.toolchain.JavaToolchainService; @@ -72,9 +75,19 @@ private void addMrjarSourceset(Project project, JavaPluginExtension javaExtensio SourceSet sourceSet = javaExtension.getSourceSets().maybeCreate(sourcesetName); GradleUtils.extendSourceSet(project, SourceSet.MAIN_SOURCE_SET_NAME, sourcesetName); - project.getTasks().withType(Jar.class).named(JavaPlugin.JAR_TASK_NAME).configure(jarTask -> { - jarTask.into("META-INF/versions/" + javaVersion, copySpec -> copySpec.from(sourceSet.getOutput())); - jarTask.manifest(manifest -> { manifest.attributes(Map.of("Multi-Release", "true")); }); + var jarTask = project.getTasks().withType(Jar.class).named(JavaPlugin.JAR_TASK_NAME); + jarTask.configure(task -> { + task.into("META-INF/versions/" + javaVersion, copySpec -> copySpec.from(sourceSet.getOutput())); + task.manifest(manifest -> { manifest.attributes(Map.of("Multi-Release", "true")); }); + }); + + project.getTasks().withType(Test.class).named(JavaPlugin.TEST_TASK_NAME).configure(testTask -> { + testTask.dependsOn(jarTask); + + SourceSetContainer sourceSets = GradleUtils.getJavaSourceSets(project); + FileCollection mainRuntime = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME).getRuntimeClasspath(); + FileCollection testRuntime = sourceSets.getByName(SourceSet.TEST_SOURCE_SET_NAME).getRuntimeClasspath(); + testTask.setClasspath(testRuntime.minus(mainRuntime).plus(project.files(jarTask))); }); project.getTasks().withType(JavaCompile.class).named(sourceSet.getCompileJavaTaskName()).configure(compileTask -> {
From 14169c442e60a9cf18bed91307a88897846e5322 Mon Sep 17 00:00:00 2001 From: Navarone Feekery <13634519+navarone-feekery@users.noreply.github.com> Date: Tue, 6 Feb 2024 12:03:36 +0100 Subject: [PATCH 056/106] [Connector Secrets] Add PUT endpoint for connector secrets (#105148) --- .../api/connector_secret.put.json | 32 +++++ .../privilege/ClusterPrivilegeResolver.java | 6 +- .../entsearch/505_connector_secret_put.yml | 71 +++++++++++ .../xpack/application/EnterpriseSearch.java | 13 +- .../secrets/ConnectorSecretsIndexService.java | 23 ++++ .../action/PutConnectorSecretAction.java | 17 +++ .../action/PutConnectorSecretRequest.java | 120 ++++++++++++++++++ .../action/PutConnectorSecretResponse.java | 71 +++++++++++
.../action/RestPutConnectorSecretAction.java | 46 +++++++ .../TransportPutConnectorSecretAction.java | 39 ++++++ .../ConnectorSecretsIndexServiceTests.java | 51 ++++++++ .../secrets/ConnectorSecretsTestUtils.java | 12 ++ .../action/PutConnectorSecretActionTests.java | 35 +++++ ...ectorSecretRequestBWCSerializingTests.java | 38 ++++++ ...ctorSecretResponseBWCSerializingTests.java | 39 ++++++ ...ransportPutConnectorSecretActionTests.java | 72 +++++++++++ .../xpack/security/operator/Constants.java | 1 + .../service/ElasticServiceAccountsTests.java | 1 + 18 files changed, 684 insertions(+), 3 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.put.json create mode 100644 x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/505_connector_secret_put.yml create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretRequest.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretResponse.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestPutConnectorSecretAction.java create mode 100644 x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportPutConnectorSecretAction.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretActionTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretRequestBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretResponseBWCSerializingTests.java create mode 100644 x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportPutConnectorSecretActionTests.java diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.put.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.put.json new file mode 100644 index 0000000000000..63ad6ce6e2006 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_secret.put.json @@ -0,0 +1,32 @@ +{ + "connector_secret.put": { + "documentation": { + "url": null, + "description": "Creates or updates a secret for a Connector." + }, + "stability": "experimental", + "visibility":"private", + "headers":{ + "accept": [ "application/json" ] + }, + "url":{ + "paths":[ + { + "path":"/_connector/_secret/{id}", + "methods":[ "PUT" ], + "parts": { + "id": { + "type": "string", + "description": "The unique identifier of the connector secret to be created or updated." 
+ } + } + ] + }, + "params":{}, + "body": { + "description":"The secret value to store", + "required":true + } + } +}
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java index 96b4ab3d51bce..63179c7cf09e6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ClusterPrivilegeResolver.java @@ -335,7 +335,11 @@ public class ClusterPrivilegeResolver { public static final NamedClusterPrivilege WRITE_CONNECTOR_SECRETS = new ActionClusterPrivilege( "write_connector_secrets", - Set.of("cluster:admin/xpack/connector/secret/post", "cluster:admin/xpack/connector/secret/delete") + Set.of( + "cluster:admin/xpack/connector/secret/delete", + "cluster:admin/xpack/connector/secret/post", + "cluster:admin/xpack/connector/secret/put" + ) ); private static final Map VALUES = sortByAccessLevel(
diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/505_connector_secret_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/505_connector_secret_put.yml new file mode 100644 index 0000000000000..1762e454b094d --- /dev/null +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/505_connector_secret_put.yml @@ -0,0 +1,71 @@ +setup: + - skip: + version: " - 8.12.99" + reason: Introduced in 8.13.0 + +--- +'Put connector secret - admin': + - do: + connector_secret.put: + id: test-secret + body: + value: my-secret + - match: { result: 'created' } + + - do: + connector_secret.get: + id: test-secret + - match: { value: my-secret } + + - do: + connector_secret.put: + id: test-secret + body: + value: my-secret-2 + - match: { result: 'updated' } + + - do: + connector_secret.get: + id: test-secret + - match: { value: my-secret-2 } + +--- +'Put connector secret - authorized user': + - skip: + features: headers + + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + connector_secret.put: + id: test-secret + body: + value: my-secret + - match: { result: 'created' } + + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + connector_secret.get: + id: test-secret + - match: { value: my-secret } + +--- +'Put connector secret - unauthorized user': + - skip: + features: headers + + - do: + headers: { Authorization: "Basic ZW50c2VhcmNoLXVucHJpdmlsZWdlZDplbnRzZWFyY2gtdW5wcml2aWxlZ2VkLXVzZXI=" } # unprivileged + connector_secret.put: + id: test-secret + body: + value: my-secret + catch: unauthorized + +--- +'Put connector secret when value is missing should fail': + - do: + connector_secret.put: + id: test-secret + body: + value: null + catch: bad_request
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index b10d1f9e582a0..322c8994f4314 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -103,12 +103,15 @@ import
org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretAction; +import org.elasticsearch.xpack.application.connector.secrets.action.PutConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.RestDeleteConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.RestGetConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.RestPostConnectorSecretAction; +import org.elasticsearch.xpack.application.connector.secrets.action.RestPutConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.TransportDeleteConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.TransportGetConnectorSecretAction; import org.elasticsearch.xpack.application.connector.secrets.action.TransportPostConnectorSecretAction; +import org.elasticsearch.xpack.application.connector.secrets.action.TransportPutConnectorSecretAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CancelConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.CheckInConnectorSyncJobAction; import org.elasticsearch.xpack.application.connector.syncjob.action.DeleteConnectorSyncJobAction; @@ -290,7 +293,8 @@ protected XPackLicenseState getLicenseState() { List.of( new ActionHandler<>(DeleteConnectorSecretAction.INSTANCE, TransportDeleteConnectorSecretAction.class), new ActionHandler<>(GetConnectorSecretAction.INSTANCE, TransportGetConnectorSecretAction.class), - new ActionHandler<>(PostConnectorSecretAction.INSTANCE, TransportPostConnectorSecretAction.class) + new ActionHandler<>(PostConnectorSecretAction.INSTANCE, TransportPostConnectorSecretAction.class), + new ActionHandler<>(PutConnectorSecretAction.INSTANCE, TransportPutConnectorSecretAction.class) ) ); } @@ -378,7 +382,12 @@ public List getRestHandlers( if (ConnectorSecretsFeature.isEnabled()) { restHandlers.addAll( - List.of(new RestGetConnectorSecretAction(), new RestPostConnectorSecretAction(), new RestDeleteConnectorSecretAction()) + List.of( + new RestDeleteConnectorSecretAction(), + new RestGetConnectorSecretAction(), + new RestPostConnectorSecretAction(), + new RestPutConnectorSecretAction() + ) ); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexService.java index c994fc1155277..cc25b8e5317d4 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexService.java @@ -12,14 +12,18 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.indices.SystemIndexDescriptor; +import org.elasticsearch.xcontent.ToXContent; import 
org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretResponse; import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretResponse; import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretRequest; import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretResponse; +import org.elasticsearch.xpack.application.connector.secrets.action.PutConnectorSecretRequest; +import org.elasticsearch.xpack.application.connector.secrets.action.PutConnectorSecretResponse; import org.elasticsearch.xpack.core.template.TemplateUtils; import java.util.Map; @@ -96,6 +100,25 @@ public void createSecret(PostConnectorSecretRequest request, ActionListener listener) { + + String connectorSecretId = request.id(); + + try { + clientWithOrigin.prepareIndex(CONNECTOR_SECRETS_INDEX_NAME) + .setId(connectorSecretId) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .setSource(request.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)) + .execute( + listener.delegateFailureAndWrap( + (l, indexResponse) -> l.onResponse(new PutConnectorSecretResponse(indexResponse.getResult())) + ) + ); + } catch (Exception e) { + listener.onFailure(e); + } + } + public void deleteSecret(String id, ActionListener listener) { try { clientWithOrigin.prepareDelete(CONNECTOR_SECRETS_INDEX_NAME, id) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretAction.java new file mode 100644 index 0000000000000..53bfc3f48a539 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretAction.java @@ -0,0 +1,17 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionType; + +public class PutConnectorSecretAction { + public static final String NAME = "cluster:admin/xpack/connector/secret/put"; + public static final ActionType INSTANCE = new ActionType<>(NAME); + + private PutConnectorSecretAction() {/* no instances */} +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretRequest.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretRequest.java new file mode 100644 index 0000000000000..94bfceaddb84f --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretRequest.java @@ -0,0 +1,120 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public class PutConnectorSecretRequest extends ActionRequest implements ToXContentObject { + + private final String id; + private final String value; + + public PutConnectorSecretRequest(String id, String value) { + this.id = id; + this.value = value; + } + + public PutConnectorSecretRequest(StreamInput in) throws IOException { + super(in); + this.id = in.readString(); + this.value = in.readString(); + } + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "connector_secret_put_request", + false, + ((args, id) -> new PutConnectorSecretRequest(id, (String) args[0])) + ); + + static { + PARSER.declareString(constructorArg(), new ParseField("value")); + } + + public static PutConnectorSecretRequest fromXContentBytes(String id, BytesReference source, XContentType xContentType) { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) { + return PutConnectorSecretRequest.fromXContent(parser, id); + } catch (IOException e) { + throw new ElasticsearchParseException("Failed to parse: " + source.utf8ToString(), e); + } + } + + public static PutConnectorSecretRequest fromXContent(XContentParser parser, String id) throws IOException { + return PARSER.parse(parser, id); + } + + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field("value", value); + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(id); + out.writeString(value); + } + + @Override + public ActionRequestValidationException validate() { + + ActionRequestValidationException exception = null; + + if (Strings.isNullOrEmpty(id())) { + exception = addValidationError("[id] cannot be [null] or [\"\"]", exception); + } + if (Strings.isNullOrEmpty(value())) { + exception = addValidationError("[value] cannot be [null] or [\"\"]", exception); + } + + return exception; + } + + public String id() { + return id; + } + + public String value() { + return value; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PutConnectorSecretRequest request = (PutConnectorSecretRequest) o; + return Objects.equals(id, request.id) && 
Objects.equals(value, request.value); + } + + @Override + public int hashCode() { + return Objects.hash(value); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretResponse.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretResponse.java new file mode 100644 index 0000000000000..db4db6d3939bd --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretResponse.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public class PutConnectorSecretResponse extends ActionResponse implements ToXContentObject { + + final DocWriteResponse.Result result; + + public PutConnectorSecretResponse(DocWriteResponse.Result result) { + this.result = result; + } + + public PutConnectorSecretResponse(StreamInput in) throws IOException { + super(in); + result = DocWriteResponse.Result.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + this.result.writeTo(out); + } + + public DocWriteResponse.Result result() { + return this.result; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", this.result.getLowercase()); + builder.endObject(); + return builder; + } + + public RestStatus status() { + return switch (result) { + case CREATED -> RestStatus.CREATED; + case NOT_FOUND -> RestStatus.NOT_FOUND; + default -> RestStatus.OK; + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PutConnectorSecretResponse response = (PutConnectorSecretResponse) o; + return result == response.result; + } + + @Override + public int hashCode() { + return Objects.hash(result); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestPutConnectorSecretAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestPutConnectorSecretAction.java new file mode 100644 index 0000000000000..8857a81282198 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/RestPutConnectorSecretAction.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; +import org.elasticsearch.rest.action.RestToXContentListener; + +import java.io.IOException; +import java.util.List; + +@ServerlessScope(Scope.INTERNAL) +public class RestPutConnectorSecretAction extends BaseRestHandler { + + @Override + public String getName() { + return "connector_secret_put_action"; + } + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.PUT, "/_connector/_secret/{id}")); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + PutConnectorSecretRequest putSecretRequest = PutConnectorSecretRequest.fromXContentBytes( + request.param("id"), + request.content(), + request.getXContentType() + ); + return restChannel -> client.execute( + PutConnectorSecretAction.INSTANCE, + putSecretRequest, + new RestToXContentListener<>(restChannel, PutConnectorSecretResponse::status, r -> null) + ); + } +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportPutConnectorSecretAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportPutConnectorSecretAction.java new file mode 100644 index 0000000000000..bd20d93db1dc3 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportPutConnectorSecretAction.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsIndexService; + +public class TransportPutConnectorSecretAction extends HandledTransportAction { + + private final ConnectorSecretsIndexService connectorSecretsIndexService; + + @Inject + public TransportPutConnectorSecretAction(TransportService transportService, ActionFilters actionFilters, Client client) { + super( + PutConnectorSecretAction.NAME, + transportService, + actionFilters, + PutConnectorSecretRequest::new, + EsExecutors.DIRECT_EXECUTOR_SERVICE + ); + this.connectorSecretsIndexService = new ConnectorSecretsIndexService(client); + } + + protected void doExecute(Task task, PutConnectorSecretRequest request, ActionListener listener) { + connectorSecretsIndexService.createSecretWithDocId(request, listener); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexServiceTests.java index b93c83c6494f3..58b3cc4888d18 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsIndexServiceTests.java @@ -9,11 +9,14 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretResponse; import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretResponse; import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretRequest; import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretResponse; +import org.elasticsearch.xpack.application.connector.secrets.action.PutConnectorSecretRequest; +import org.elasticsearch.xpack.application.connector.secrets.action.PutConnectorSecretResponse; import org.junit.Before; import java.util.concurrent.CountDownLatch; @@ -44,6 +47,21 @@ public void testCreateAndGetConnectorSecret() throws Exception { assertThat(gotSecret.value(), notNullValue()); } + public void testUpdateConnectorSecret() throws Exception { + String secretId = "secret-id"; + String value = "my-secret-value"; + + PutConnectorSecretRequest updateSecretRequest = new PutConnectorSecretRequest(secretId, value); + + PutConnectorSecretResponse response = awaitPutConnectorSecret(updateSecretRequest); + assertThat(response.result(), equalTo(DocWriteResponse.Result.CREATED)); + + GetConnectorSecretResponse gotSecret = awaitGetConnectorSecret(secretId); + + assertThat(gotSecret.id(), equalTo(secretId)); + assertThat(gotSecret.value(), equalTo(value)); + } + public void testDeleteConnectorSecret() throws Exception { 
PostConnectorSecretRequest createSecretRequest = ConnectorSecretsTestUtils.getRandomPostConnectorSecretRequest(); PostConnectorSecretResponse createdSecret = awaitPostConnectorSecret(createSecretRequest); @@ -89,6 +107,39 @@ public void onFailure(Exception e) { return response; } + private PutConnectorSecretResponse awaitPutConnectorSecret(PutConnectorSecretRequest secretRequest) throws Exception { + CountDownLatch latch = new CountDownLatch(1); + + final AtomicReference responseRef = new AtomicReference<>(null); + final AtomicReference exception = new AtomicReference<>(null); + + connectorSecretsIndexService.createSecretWithDocId(secretRequest, new ActionListener<>() { + @Override + public void onResponse(PutConnectorSecretResponse putConnectorSecretResponse) { + responseRef.set(putConnectorSecretResponse); + latch.countDown(); + } + + @Override + public void onFailure(Exception e) { + exception.set(e); + latch.countDown(); + } + }); + + if (exception.get() != null) { + throw exception.get(); + } + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + PutConnectorSecretResponse response = responseRef.get(); + + assertTrue("Timeout waiting for post request", requestTimedOut); + assertNotNull("Received null response from put request", response); + + return response; + } + private GetConnectorSecretResponse awaitGetConnectorSecret(String connectorSecretId) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsTestUtils.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsTestUtils.java index 13051505f9c4d..ec7d8b9a58543 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsTestUtils.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/ConnectorSecretsTestUtils.java @@ -7,16 +7,20 @@ package org.elasticsearch.xpack.application.connector.secrets; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretRequest; import org.elasticsearch.xpack.application.connector.secrets.action.DeleteConnectorSecretResponse; import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretRequest; import org.elasticsearch.xpack.application.connector.secrets.action.GetConnectorSecretResponse; import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretRequest; import org.elasticsearch.xpack.application.connector.secrets.action.PostConnectorSecretResponse; +import org.elasticsearch.xpack.application.connector.secrets.action.PutConnectorSecretRequest; +import org.elasticsearch.xpack.application.connector.secrets.action.PutConnectorSecretResponse; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLengthBetween; import static org.elasticsearch.test.ESTestCase.randomBoolean; +import static org.elasticsearch.test.ESTestCase.randomFrom; public class ConnectorSecretsTestUtils { @@ -38,6 +42,14 @@ public static PostConnectorSecretResponse getRandomPostConnectorSecretResponse() return new PostConnectorSecretResponse(randomAlphaOfLength(10)); } + public static PutConnectorSecretRequest getRandomPutConnectorSecretRequest() { + 
return new PutConnectorSecretRequest(randomAlphaOfLengthBetween(5, 15), randomAlphaOfLengthBetween(1, 20)); + } + + public static PutConnectorSecretResponse getRandomPutConnectorSecretResponse() { + return new PutConnectorSecretResponse(randomFrom(DocWriteResponse.Result.values())); + } + public static DeleteConnectorSecretRequest getRandomDeleteConnectorSecretRequest() { return new DeleteConnectorSecretRequest(randomAlphaOfLengthBetween(1, 20)); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretActionTests.java new file mode 100644 index 0000000000000..b7c7453611bdf --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretActionTests.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class PutConnectorSecretActionTests extends ESTestCase { + + public void testValidate_WhenConnectorSecretIdIsPresent_ExpectNoValidationError() { + PutConnectorSecretRequest request = ConnectorSecretsTestUtils.getRandomPutConnectorSecretRequest(); + ActionRequestValidationException exception = request.validate(); + + assertThat(exception, nullValue()); + } + + public void testValidate_WhenConnectorSecretIdIsEmpty_ExpectValidationError() { + PutConnectorSecretRequest requestWithMissingValue = new PutConnectorSecretRequest("", ""); + ActionRequestValidationException exception = requestWithMissingValue.validate(); + + assertThat(exception, notNullValue()); + assertThat(exception.getMessage(), containsString("[id] cannot be [null] or [\"\"]")); + assertThat(exception.getMessage(), containsString("[value] cannot be [null] or [\"\"]")); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretRequestBWCSerializingTests.java new file mode 100644 index 0000000000000..2264b68532d35 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretRequestBWCSerializingTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class PutConnectorSecretRequestBWCSerializingTests extends AbstractBWCWireSerializationTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return PutConnectorSecretRequest::new; + } + + @Override + protected PutConnectorSecretRequest createTestInstance() { + return ConnectorSecretsTestUtils.getRandomPutConnectorSecretRequest(); + } + + @Override + protected PutConnectorSecretRequest mutateInstance(PutConnectorSecretRequest instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected PutConnectorSecretRequest mutateInstanceForVersion(PutConnectorSecretRequest instance, TransportVersion version) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..d2e16d345b8c0 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/PutConnectorSecretResponseBWCSerializingTests.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class PutConnectorSecretResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return PutConnectorSecretResponse::new; + } + + @Override + protected PutConnectorSecretResponse createTestInstance() { + return ConnectorSecretsTestUtils.getRandomPutConnectorSecretResponse(); + } + + @Override + protected PutConnectorSecretResponse mutateInstance(PutConnectorSecretResponse instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected PutConnectorSecretResponse mutateInstanceForVersion(PutConnectorSecretResponse instance, TransportVersion version) { + return instance; + } + +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportPutConnectorSecretActionTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportPutConnectorSecretActionTests.java new file mode 100644 index 0000000000000..35d7998d1caf9 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/secrets/action/TransportPutConnectorSecretActionTests.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector.secrets.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.secrets.ConnectorSecretsTestUtils; +import org.junit.Before; + +import java.util.Collections; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static org.mockito.Mockito.mock; + +public class TransportPutConnectorSecretActionTests extends ESSingleNodeTestCase { + + private static final Long TIMEOUT_SECONDS = 10L; + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + private TransportPutConnectorSecretAction action; + + @Before + public void setup() { + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + action = new TransportPutConnectorSecretAction(transportService, mock(ActionFilters.class), client()); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + ThreadPool.terminate(threadPool, TIMEOUT_SECONDS, TimeUnit.SECONDS); + } + + public void testPutConnectorSecret_ExpectNoWarnings() throws InterruptedException { + PutConnectorSecretRequest request = ConnectorSecretsTestUtils.getRandomPutConnectorSecretRequest(); + + executeRequest(request); + + ensureNoWarnings(); + } + + private void executeRequest(PutConnectorSecretRequest request) throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + action.doExecute(mock(Task.class), request, ActionListener.wrap(response -> latch.countDown(), exception -> latch.countDown())); + + boolean requestTimedOut = latch.await(TIMEOUT_SECONDS, TimeUnit.SECONDS); + + assertTrue("Timeout waiting for put request", requestTimedOut); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 15477f8a1536b..c99b779df5a54 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -144,6 +144,7 @@ public class Constants { "cluster:admin/xpack/connector/secret/delete", "cluster:admin/xpack/connector/secret/get", "cluster:admin/xpack/connector/secret/post", + "cluster:admin/xpack/connector/secret/put", "cluster:admin/xpack/connector/sync_job/cancel", "cluster:admin/xpack/connector/sync_job/check_in", "cluster:admin/xpack/connector/sync_job/delete", diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java index 61646f5ff375b..8ec06c7273bbd 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java @@ -348,6 +348,7 @@ public void testElasticEnterpriseSearchServerAccount() { assertThat(role.cluster().check("cluster:admin/xpack/connector/secret/delete", request, authentication), is(true)); assertThat(role.cluster().check("cluster:admin/xpack/connector/secret/get", request, authentication), is(true)); assertThat(role.cluster().check("cluster:admin/xpack/connector/secret/post", request, authentication), is(true)); + assertThat(role.cluster().check("cluster:admin/xpack/connector/secret/put", request, authentication), is(true)); List.of( "search-" + randomAlphaOfLengthBetween(1, 20), From 7d522145def78ac55146eaf429d761d961ed1d42 Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Tue, 6 Feb 2024 13:18:47 +0200 Subject: [PATCH 057/106] Refactor ApiKeyFieldNameTranslators to expose query builder translator (#105057) This exposes a public method that deep-copies a QueryBuilder, to be used for querying API Keys from the .security index, while also translating the query-level field names to index-level ones. Relates #104895 --- .../index/query/SimpleQueryStringBuilder.java | 8 +- .../action/apikey/QueryApiKeyRequest.java | 20 -- .../apikey/TransportQueryApiKeyAction.java | 44 +--- .../support/ApiKeyBoolQueryBuilder.java | 147 +---------- .../support/ApiKeyFieldNameTranslators.java | 245 +++++++++++++++++- .../TransportQueryApiKeyActionTests.java | 5 +- .../support/ApiKeyBoolQueryBuilderTests.java | 171 +++++++++++- 7 files changed, 414 insertions(+), 226 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java index 63cd598caa784..60769fc8fda77 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java @@ -280,14 +280,14 @@ public SimpleQueryStringBuilder flags(SimpleQueryStringFlag... flags) { return this; } - /** For testing and serialisation only. */ - SimpleQueryStringBuilder flags(int flags) { + /** For testing, builder instance copy, and serialisation only. */ + public SimpleQueryStringBuilder flags(int flags) { this.flags = flags; return this; } - /** For testing only: Return the flags set for this query. */ - int flags() { + /** For testing and instance copy only: Return the flags set for this query. 
*/ + public int flags() { return this.flags; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyRequest.java index 6910eab73ea90..b494055b976ac 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/QueryApiKeyRequest.java @@ -7,11 +7,9 @@ package org.elasticsearch.xpack.core.security.action.apikey; -import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.TransportAction; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.query.QueryBuilder; @@ -62,24 +60,6 @@ public QueryApiKeyRequest( this.withLimitedBy = withLimitedBy; } - public QueryApiKeyRequest(StreamInput in) throws IOException { - super(in); - this.queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class); - this.from = in.readOptionalVInt(); - this.size = in.readOptionalVInt(); - if (in.readBoolean()) { - this.fieldSortBuilders = in.readCollectionAsList(FieldSortBuilder::new); - } else { - this.fieldSortBuilders = null; - } - this.searchAfterBuilder = in.readOptionalWriteable(SearchAfterBuilder::new); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_5_0)) { - this.withLimitedBy = in.readBoolean(); - } else { - this.withLimitedBy = false; - } - } - public QueryBuilder getQueryBuilder() { return queryBuilder; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java index b1f73251cdb47..dd78edeae3c62 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyAction.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; @@ -23,13 +22,11 @@ import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.security.authc.ApiKeyService; import org.elasticsearch.xpack.security.support.ApiKeyBoolQueryBuilder; -import org.elasticsearch.xpack.security.support.ApiKeyFieldNameTranslators; -import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Consumer; +import static org.elasticsearch.xpack.security.support.ApiKeyFieldNameTranslators.translateFieldSortBuilders; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; public final class TransportQueryApiKeyAction extends TransportAction { @@ -109,43 +106,4 @@ protected void doExecute(Task task, QueryApiKeyRequest request, ActionListener 
fieldSortBuilders, - SearchSourceBuilder searchSourceBuilder, - Consumer fieldNameVisitor - ) { - fieldSortBuilders.forEach(fieldSortBuilder -> { - if (fieldSortBuilder.getNestedSort() != null) { - throw new IllegalArgumentException("nested sorting is not supported for API Key query"); - } - if (FieldSortBuilder.DOC_FIELD_NAME.equals(fieldSortBuilder.getFieldName())) { - searchSourceBuilder.sort(fieldSortBuilder); - } else { - final String translatedFieldName = ApiKeyFieldNameTranslators.translate(fieldSortBuilder.getFieldName()); - fieldNameVisitor.accept(translatedFieldName); - if (translatedFieldName.equals(fieldSortBuilder.getFieldName())) { - searchSourceBuilder.sort(fieldSortBuilder); - } else { - final FieldSortBuilder translatedFieldSortBuilder = new FieldSortBuilder(translatedFieldName).order( - fieldSortBuilder.order() - ) - .missing(fieldSortBuilder.missing()) - .unmappedType(fieldSortBuilder.unmappedType()) - .setFormat(fieldSortBuilder.getFormat()); - - if (fieldSortBuilder.sortMode() != null) { - translatedFieldSortBuilder.sortMode(fieldSortBuilder.sortMode()); - } - if (fieldSortBuilder.getNestedSort() != null) { - translatedFieldSortBuilder.setNestedSort(fieldSortBuilder.getNestedSort()); - } - if (fieldSortBuilder.getNumericType() != null) { - translatedFieldSortBuilder.setNumericType(fieldSortBuilder.getNumericType()); - } - searchSourceBuilder.sort(translatedFieldSortBuilder); - } - } - }); - } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java index 651427d07e651..8d167954b399a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilder.java @@ -10,33 +10,20 @@ import org.apache.lucene.search.Query; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.ExistsQueryBuilder; -import org.elasticsearch.index.query.IdsQueryBuilder; -import org.elasticsearch.index.query.MatchAllQueryBuilder; -import org.elasticsearch.index.query.MatchNoneQueryBuilder; -import org.elasticsearch.index.query.MatchQueryBuilder; -import org.elasticsearch.index.query.PrefixQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryRewriteContext; -import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; -import org.elasticsearch.index.query.SimpleQueryStringBuilder; -import org.elasticsearch.index.query.TermQueryBuilder; -import org.elasticsearch.index.query.TermsQueryBuilder; -import org.elasticsearch.index.query.WildcardQueryBuilder; -import org.elasticsearch.index.search.QueryParserHelper; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import org.elasticsearch.xpack.security.authc.ApiKeyService; import java.io.IOException; -import java.util.HashMap; -import java.util.Map; import java.util.Set; import java.util.function.Consumer; import static org.elasticsearch.xpack.security.action.apikey.TransportQueryApiKeyAction.API_KEY_TYPE_RUNTIME_MAPPING_FIELD; +import static 
org.elasticsearch.xpack.security.support.ApiKeyFieldNameTranslators.translateQueryBuilderFields; public class ApiKeyBoolQueryBuilder extends BoolQueryBuilder { @@ -82,7 +69,7 @@ public static ApiKeyBoolQueryBuilder build( ) { final ApiKeyBoolQueryBuilder finalQuery = new ApiKeyBoolQueryBuilder(); if (queryBuilder != null) { - QueryBuilder processedQuery = doProcess(queryBuilder, fieldNameVisitor); + QueryBuilder processedQuery = translateQueryBuilderFields(queryBuilder, fieldNameVisitor); finalQuery.must(processedQuery); } finalQuery.filter(QueryBuilders.termQuery("doc_type", "api_key")); @@ -108,136 +95,6 @@ public static ApiKeyBoolQueryBuilder build( return finalQuery; } - private static QueryBuilder doProcess(QueryBuilder qb, Consumer fieldNameVisitor) { - if (qb instanceof final BoolQueryBuilder query) { - final BoolQueryBuilder newQuery = QueryBuilders.boolQuery() - .minimumShouldMatch(query.minimumShouldMatch()) - .adjustPureNegative(query.adjustPureNegative()) - .boost(query.boost()); - query.must().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::must); - query.should().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::should); - query.mustNot().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::mustNot); - query.filter().stream().map(q -> ApiKeyBoolQueryBuilder.doProcess(q, fieldNameVisitor)).forEach(newQuery::filter); - return newQuery; - } else if (qb instanceof MatchAllQueryBuilder) { - return qb; - } else if (qb instanceof IdsQueryBuilder) { - return qb; - } else if (qb instanceof final TermQueryBuilder query) { - final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); - fieldNameVisitor.accept(translatedFieldName); - return QueryBuilders.termQuery(translatedFieldName, query.value()) - .caseInsensitive(query.caseInsensitive()) - .boost(query.boost()); - } else if (qb instanceof final ExistsQueryBuilder query) { - final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); - fieldNameVisitor.accept(translatedFieldName); - return QueryBuilders.existsQuery(translatedFieldName).boost(query.boost()); - } else if (qb instanceof final TermsQueryBuilder query) { - if (query.termsLookup() != null) { - throw new IllegalArgumentException("terms query with terms lookup is not supported for API Key query"); - } - final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); - fieldNameVisitor.accept(translatedFieldName); - return QueryBuilders.termsQuery(translatedFieldName, query.getValues()).boost(query.boost()); - } else if (qb instanceof final PrefixQueryBuilder query) { - final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); - fieldNameVisitor.accept(translatedFieldName); - return QueryBuilders.prefixQuery(translatedFieldName, query.value()) - .caseInsensitive(query.caseInsensitive()) - .rewrite(query.rewrite()) - .boost(query.boost()); - } else if (qb instanceof final WildcardQueryBuilder query) { - final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); - fieldNameVisitor.accept(translatedFieldName); - return QueryBuilders.wildcardQuery(translatedFieldName, query.value()) - .caseInsensitive(query.caseInsensitive()) - .rewrite(query.rewrite()) - .boost(query.boost()); - } else if (qb instanceof final MatchQueryBuilder query) { - final String translatedFieldName = 
ApiKeyFieldNameTranslators.translate(query.fieldName()); - fieldNameVisitor.accept(translatedFieldName); - final MatchQueryBuilder matchQueryBuilder = QueryBuilders.matchQuery(translatedFieldName, query.value()); - if (query.operator() != null) { - matchQueryBuilder.operator(query.operator()); - } - if (query.analyzer() != null) { - matchQueryBuilder.analyzer(query.analyzer()); - } - if (query.fuzziness() != null) { - matchQueryBuilder.fuzziness(query.fuzziness()); - } - if (query.minimumShouldMatch() != null) { - matchQueryBuilder.minimumShouldMatch(query.minimumShouldMatch()); - } - if (query.fuzzyRewrite() != null) { - matchQueryBuilder.fuzzyRewrite(query.fuzzyRewrite()); - } - if (query.zeroTermsQuery() != null) { - matchQueryBuilder.zeroTermsQuery(query.zeroTermsQuery()); - } - matchQueryBuilder.prefixLength(query.prefixLength()) - .maxExpansions(query.maxExpansions()) - .fuzzyTranspositions(query.fuzzyTranspositions()) - .lenient(query.lenient()) - .autoGenerateSynonymsPhraseQuery(query.autoGenerateSynonymsPhraseQuery()) - .boost(query.boost()); - return matchQueryBuilder; - } else if (qb instanceof final RangeQueryBuilder query) { - if (query.relation() != null) { - throw new IllegalArgumentException("range query with relation is not supported for API Key query"); - } - final String translatedFieldName = ApiKeyFieldNameTranslators.translate(query.fieldName()); - fieldNameVisitor.accept(translatedFieldName); - final RangeQueryBuilder newQuery = QueryBuilders.rangeQuery(translatedFieldName); - if (query.format() != null) { - newQuery.format(query.format()); - } - if (query.timeZone() != null) { - newQuery.timeZone(query.timeZone()); - } - if (query.from() != null) { - newQuery.from(query.from()).includeLower(query.includeLower()); - } - if (query.to() != null) { - newQuery.to(query.to()).includeUpper(query.includeUpper()); - } - return newQuery.boost(query.boost()); - } else if (qb instanceof final SimpleQueryStringBuilder simpleQueryStringBuilder) { - if (simpleQueryStringBuilder.fields().isEmpty()) { - simpleQueryStringBuilder.field("*"); - } - // override lenient if querying all the fields, because, due to different field mappings, - // the query parsing will almost certainly fail otherwise - if (QueryParserHelper.hasAllFieldsWildcard(simpleQueryStringBuilder.fields().keySet())) { - simpleQueryStringBuilder.lenient(true); - } - Map requestedFields = new HashMap<>(simpleQueryStringBuilder.fields()); - simpleQueryStringBuilder.fields().clear(); - for (Map.Entry requestedFieldNameOrPattern : requestedFields.entrySet()) { - for (String translatedField : ApiKeyFieldNameTranslators.translatePattern(requestedFieldNameOrPattern.getKey())) { - simpleQueryStringBuilder.fields() - .compute( - translatedField, - (k, v) -> (v == null) ? requestedFieldNameOrPattern.getValue() : v * requestedFieldNameOrPattern.getValue() - ); - fieldNameVisitor.accept(translatedField); - } - } - if (simpleQueryStringBuilder.fields().isEmpty()) { - // A SimpleQueryStringBuilder with empty fields() will eventually produce a SimpleQueryString query - // that accesses all the fields, including disallowed ones. - // Instead, the behavior we're after is that a query that accesses only disallowed fields should - // not match any docs. 
- return new MatchNoneQueryBuilder(); - } else { - return simpleQueryStringBuilder; - } - } else { - throw new IllegalArgumentException("Query type [" + qb.getName() + "] is not supported for API Key query"); - } - } - @Override protected Query doToQuery(SearchExecutionContext context) throws IOException { context.setAllowedFields(ApiKeyBoolQueryBuilder::isIndexFieldNameAllowed); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyFieldNameTranslators.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyFieldNameTranslators.java index 29bf3ca5dd045..f8ea0663a7c51 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyFieldNameTranslators.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/ApiKeyFieldNameTranslators.java @@ -8,10 +8,33 @@ package org.elasticsearch.xpack.security.support; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.ExistsQueryBuilder; +import org.elasticsearch.index.query.IdsQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.MatchNoneQueryBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.PrefixQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.index.query.SimpleQueryStringBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.index.query.WildcardQueryBuilder; +import org.elasticsearch.index.search.QueryParserHelper; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; +import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Objects; import java.util.Set; +import java.util.function.Consumer; import java.util.function.Function; import static org.elasticsearch.xpack.security.action.apikey.TransportQueryApiKeyAction.API_KEY_TYPE_RUNTIME_MAPPING_FIELD; @@ -38,37 +61,243 @@ public class ApiKeyFieldNameTranslators { ); } + /** + * Adds the {@param fieldSortBuilders} to the {@param searchSourceBuilder}, translating the field names, + * from query level to index level, see {@link #translate}. + * The optional {@param visitor} can be used to collect all the translated field names. + */ + public static void translateFieldSortBuilders( + List<FieldSortBuilder> fieldSortBuilders, + SearchSourceBuilder searchSourceBuilder, + @Nullable Consumer<String> visitor + ) { + final Consumer<String> fieldNameVisitor = visitor != null ?
visitor : ignored -> {}; + fieldSortBuilders.forEach(fieldSortBuilder -> { + if (fieldSortBuilder.getNestedSort() != null) { + throw new IllegalArgumentException("nested sorting is not supported for API Key query"); + } + if (FieldSortBuilder.DOC_FIELD_NAME.equals(fieldSortBuilder.getFieldName())) { + searchSourceBuilder.sort(fieldSortBuilder); + } else { + final String translatedFieldName = translate(fieldSortBuilder.getFieldName()); + fieldNameVisitor.accept(translatedFieldName); + if (translatedFieldName.equals(fieldSortBuilder.getFieldName())) { + searchSourceBuilder.sort(fieldSortBuilder); + } else { + final FieldSortBuilder translatedFieldSortBuilder = new FieldSortBuilder(translatedFieldName).order( + fieldSortBuilder.order() + ) + .missing(fieldSortBuilder.missing()) + .unmappedType(fieldSortBuilder.unmappedType()) + .setFormat(fieldSortBuilder.getFormat()); + + if (fieldSortBuilder.sortMode() != null) { + translatedFieldSortBuilder.sortMode(fieldSortBuilder.sortMode()); + } + if (fieldSortBuilder.getNestedSort() != null) { + translatedFieldSortBuilder.setNestedSort(fieldSortBuilder.getNestedSort()); + } + if (fieldSortBuilder.getNumericType() != null) { + translatedFieldSortBuilder.setNumericType(fieldSortBuilder.getNumericType()); + } + searchSourceBuilder.sort(translatedFieldSortBuilder); + } + } + }); + } + + /** + * Deep copies the passed-in {@param queryBuilder}, translating all the field names, from query level to index level, + * see {@link #translate}. In general, the returned builder should create the same query as if the query were + * created by the passed-in {@param queryBuilder}, only with the field names translated. + * Field name patterns (including "*") are also replaced with the explicit index level field names whose + * associated query level field names match the pattern. + * The optional {@param visitor} can be used to collect all the translated field names. + */ + public static QueryBuilder translateQueryBuilderFields(QueryBuilder queryBuilder, @Nullable Consumer<String> visitor) { + Objects.requireNonNull(queryBuilder, "unsupported \"null\" query builder for field name translation"); + final Consumer<String> fieldNameVisitor = visitor != null ?
visitor : ignored -> {}; + if (queryBuilder instanceof final BoolQueryBuilder query) { + final BoolQueryBuilder newQuery = QueryBuilders.boolQuery() + .minimumShouldMatch(query.minimumShouldMatch()) + .adjustPureNegative(query.adjustPureNegative()) + .boost(query.boost()) + .queryName(query.queryName()); + query.must().stream().map(q -> translateQueryBuilderFields(q, fieldNameVisitor)).forEach(newQuery::must); + query.should().stream().map(q -> translateQueryBuilderFields(q, fieldNameVisitor)).forEach(newQuery::should); + query.mustNot().stream().map(q -> translateQueryBuilderFields(q, fieldNameVisitor)).forEach(newQuery::mustNot); + query.filter().stream().map(q -> translateQueryBuilderFields(q, fieldNameVisitor)).forEach(newQuery::filter); + return newQuery; + } else if (queryBuilder instanceof final MatchAllQueryBuilder query) { + // just be safe and consistent to always return a new copy instance of the translated query builders + return QueryBuilders.matchAllQuery().boost(query.boost()).queryName(query.queryName()); + } else if (queryBuilder instanceof final IdsQueryBuilder query) { + // just be safe and consistent to always return a new copy instance of the translated query builders + return QueryBuilders.idsQuery().addIds(query.ids().toArray(new String[0])).boost(query.boost()).queryName(query.queryName()); + } else if (queryBuilder instanceof final TermQueryBuilder query) { + final String translatedFieldName = translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); + return QueryBuilders.termQuery(translatedFieldName, query.value()) + .caseInsensitive(query.caseInsensitive()) + .boost(query.boost()) + .queryName(query.queryName()); + } else if (queryBuilder instanceof final ExistsQueryBuilder query) { + final String translatedFieldName = translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); + return QueryBuilders.existsQuery(translatedFieldName).boost(query.boost()).queryName(query.queryName()); + } else if (queryBuilder instanceof final TermsQueryBuilder query) { + if (query.termsLookup() != null) { + throw new IllegalArgumentException("terms query with terms lookup is not supported for API Key query"); + } + final String translatedFieldName = translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); + return QueryBuilders.termsQuery(translatedFieldName, query.getValues()).boost(query.boost()).queryName(query.queryName()); + } else if (queryBuilder instanceof final PrefixQueryBuilder query) { + final String translatedFieldName = translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); + return QueryBuilders.prefixQuery(translatedFieldName, query.value()) + .caseInsensitive(query.caseInsensitive()) + .rewrite(query.rewrite()) + .boost(query.boost()) + .queryName(query.queryName()); + } else if (queryBuilder instanceof final WildcardQueryBuilder query) { + final String translatedFieldName = translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); + return QueryBuilders.wildcardQuery(translatedFieldName, query.value()) + .caseInsensitive(query.caseInsensitive()) + .rewrite(query.rewrite()) + .boost(query.boost()) + .queryName(query.queryName()); + } else if (queryBuilder instanceof final MatchQueryBuilder query) { + final String translatedFieldName = translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); + final MatchQueryBuilder matchQueryBuilder = QueryBuilders.matchQuery(translatedFieldName, query.value()); + if (query.operator() != null) { + 
matchQueryBuilder.operator(query.operator()); + } + if (query.analyzer() != null) { + matchQueryBuilder.analyzer(query.analyzer()); + } + if (query.fuzziness() != null) { + matchQueryBuilder.fuzziness(query.fuzziness()); + } + if (query.minimumShouldMatch() != null) { + matchQueryBuilder.minimumShouldMatch(query.minimumShouldMatch()); + } + if (query.fuzzyRewrite() != null) { + matchQueryBuilder.fuzzyRewrite(query.fuzzyRewrite()); + } + if (query.zeroTermsQuery() != null) { + matchQueryBuilder.zeroTermsQuery(query.zeroTermsQuery()); + } + matchQueryBuilder.prefixLength(query.prefixLength()) + .maxExpansions(query.maxExpansions()) + .fuzzyTranspositions(query.fuzzyTranspositions()) + .lenient(query.lenient()) + .autoGenerateSynonymsPhraseQuery(query.autoGenerateSynonymsPhraseQuery()) + .boost(query.boost()) + .queryName(query.queryName()); + return matchQueryBuilder; + } else if (queryBuilder instanceof final RangeQueryBuilder query) { + if (query.relation() != null) { + throw new IllegalArgumentException("range query with relation is not supported for API Key query"); + } + final String translatedFieldName = translate(query.fieldName()); + fieldNameVisitor.accept(translatedFieldName); + final RangeQueryBuilder newQuery = QueryBuilders.rangeQuery(translatedFieldName); + if (query.format() != null) { + newQuery.format(query.format()); + } + if (query.timeZone() != null) { + newQuery.timeZone(query.timeZone()); + } + if (query.from() != null) { + newQuery.from(query.from()).includeLower(query.includeLower()); + } + if (query.to() != null) { + newQuery.to(query.to()).includeUpper(query.includeUpper()); + } + return newQuery.boost(query.boost()).queryName(query.queryName()); + } else if (queryBuilder instanceof final SimpleQueryStringBuilder query) { + SimpleQueryStringBuilder simpleQueryStringBuilder = QueryBuilders.simpleQueryStringQuery(query.value()); + Map queryFields = new HashMap<>(query.fields()); + // be explicit that no field means all fields + if (queryFields.isEmpty()) { + queryFields.put("*", AbstractQueryBuilder.DEFAULT_BOOST); + } + // override "lenient" if querying all the fields, because, due to different field mappings, + // the query parsing will almost certainly fail otherwise + if (QueryParserHelper.hasAllFieldsWildcard(queryFields.keySet())) { + simpleQueryStringBuilder.lenient(true); + } else { + simpleQueryStringBuilder.lenient(query.lenient()); + } + // translate query-level field name patterns to index-level concrete field names + for (Map.Entry requestedFieldNameOrPattern : queryFields.entrySet()) { + for (String translatedField : translatePattern(requestedFieldNameOrPattern.getKey())) { + simpleQueryStringBuilder.fields() + .compute( + translatedField, + (k, v) -> (v == null) ? requestedFieldNameOrPattern.getValue() : v * requestedFieldNameOrPattern.getValue() + ); + fieldNameVisitor.accept(translatedField); + } + } + if (simpleQueryStringBuilder.fields().isEmpty()) { + // A SimpleQueryStringBuilder with empty fields() will eventually produce a SimpleQueryString + // Lucene query that accesses all the fields, including disallowed ones. + // Instead, the behavior we're after here is that a query that accesses only disallowed fields + // mustn't match any docs. 
+ return new MatchNoneQueryBuilder().boost(simpleQueryStringBuilder.boost()).queryName(simpleQueryStringBuilder.queryName()); + } + return simpleQueryStringBuilder.analyzer(query.analyzer()) + .defaultOperator(query.defaultOperator()) + .minimumShouldMatch(query.minimumShouldMatch()) + .flags(query.flags()) + .type(query.type()) + .quoteFieldSuffix(query.quoteFieldSuffix()) + .analyzeWildcard(query.analyzeWildcard()) + .autoGenerateSynonymsPhraseQuery(query.autoGenerateSynonymsPhraseQuery()) + .fuzzyTranspositions(query.fuzzyTranspositions()) + .fuzzyMaxExpansions(query.fuzzyMaxExpansions()) + .fuzzyPrefixLength(query.fuzzyPrefixLength()) + .boost(query.boost()) + .queryName(query.queryName()); + } else { + throw new IllegalArgumentException("Query type [" + queryBuilder.getName() + "] is not supported for API Key query"); + } + } + /** * Translate the query level field name to index level field names. * It throws an exception if the field name is not explicitly allowed. */ - public static String translate(String fieldName) { + protected static String translate(String fieldName) { + // protected for testing if (Regex.isSimpleMatchPattern(fieldName)) { - throw new IllegalArgumentException("Field name pattern [" + fieldName + "] is not allowed for API Key query"); + throw new IllegalArgumentException("Field name pattern [" + fieldName + "] is not allowed for API Key query or aggregation"); } for (FieldNameTranslator translator : FIELD_NAME_TRANSLATORS) { if (translator.supports(fieldName)) { return translator.translate(fieldName); } } - throw new IllegalArgumentException("Field [" + fieldName + "] is not allowed for API Key query"); + throw new IllegalArgumentException("Field [" + fieldName + "] is not allowed for API Key query or aggregation"); } /** * Translates a query level field name pattern to the matching index level field names. * The result can be the empty set, if the pattern doesn't match any of the allowed index level field names. - * If the pattern is actually a concrete field name rather than a pattern, - * it is also translated, but only if the query level field name is allowed, otherwise an exception is thrown. */ - public static Set<String> translatePattern(String fieldNameOrPattern) { + private static Set<String> translatePattern(String fieldNameOrPattern) { Set<String> indexFieldNames = new HashSet<>(); for (FieldNameTranslator translator : FIELD_NAME_TRANSLATORS) { if (translator.supports(fieldNameOrPattern)) { indexFieldNames.add(translator.translate(fieldNameOrPattern)); } } - // It's OK to "translate" to the empty set the concrete disallowed or unknown field names, because - // the SimpleQueryString query type is lenient in the sense that it ignores unknown fields and field name patterns, + // It's OK to "translate" to the empty set the concrete disallowed or unknown field names. + // For example, the SimpleQueryString query type is lenient in the sense that it ignores unknown fields and field name patterns, // so this preprocessing can ignore them too.
return indexFieldNames; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyActionTests.java index 4a2f707d3e902..1593fadf1802d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/apikey/TransportQueryApiKeyActionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.sort.SortMode; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.security.support.ApiKeyFieldNameTranslators; import java.util.ArrayList; import java.util.List; @@ -42,7 +43,7 @@ public void testTranslateFieldSortBuilders() { List sortFields = new ArrayList<>(); final SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.searchSource(); - TransportQueryApiKeyAction.translateFieldSortBuilders(originals, searchSourceBuilder, sortFields::add); + ApiKeyFieldNameTranslators.translateFieldSortBuilders(originals, searchSourceBuilder, sortFields::add); IntStream.range(0, originals.size()).forEach(i -> { final FieldSortBuilder original = originals.get(i); @@ -95,7 +96,7 @@ public void testNestedSortingIsNotAllowed() { fieldSortBuilder.setNestedSort(new NestedSortBuilder("name")); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> TransportQueryApiKeyAction.translateFieldSortBuilders( + () -> ApiKeyFieldNameTranslators.translateFieldSortBuilders( List.of(fieldSortBuilder), SearchSourceBuilder.searchSource(), ignored -> {} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java index 44b81b96e2154..fdc7b59528153 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.MultiTermQueryBuilder; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.PrefixQueryBuilder; @@ -23,6 +24,7 @@ import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.SimpleQueryStringBuilder; +import org.elasticsearch.index.query.SimpleQueryStringFlag; import org.elasticsearch.index.query.SpanQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; @@ -44,6 +46,7 @@ import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.function.Predicate; import static org.elasticsearch.test.LambdaMatchers.falseWith; @@ -119,6 +122,9 @@ public void testPrefixQueryBuilderPropertiesArePreserved() { if (randomBoolean()) { prefixQueryBuilder.boost(Math.abs(randomFloat())); } + if (randomBoolean()) { + 
prefixQueryBuilder.queryName(randomAlphaOfLengthBetween(0, 4)); + } if (randomBoolean()) { prefixQueryBuilder.caseInsensitive(randomBoolean()); } @@ -135,10 +141,156 @@ public void testPrefixQueryBuilderPropertiesArePreserved() { assertThat(prefixQueryBuilder2.fieldName(), is(ApiKeyFieldNameTranslators.translate(prefixQueryBuilder.fieldName()))); assertThat(prefixQueryBuilder2.value(), is(prefixQueryBuilder.value())); assertThat(prefixQueryBuilder2.boost(), is(prefixQueryBuilder.boost())); + assertThat(prefixQueryBuilder2.queryName(), is(prefixQueryBuilder.queryName())); assertThat(prefixQueryBuilder2.caseInsensitive(), is(prefixQueryBuilder.caseInsensitive())); assertThat(prefixQueryBuilder2.rewrite(), is(prefixQueryBuilder.rewrite())); } + public void testSimpleQueryBuilderWithAllFields() { + SimpleQueryStringBuilder simpleQueryStringBuilder = QueryBuilders.simpleQueryStringQuery(randomAlphaOfLength(4)); + if (randomBoolean()) { + if (randomBoolean()) { + simpleQueryStringBuilder.field("*"); + } else { + simpleQueryStringBuilder.field("*", Math.abs(randomFloat())); + } + } + if (randomBoolean()) { + simpleQueryStringBuilder.lenient(randomBoolean()); + } + List queryFields = new ArrayList<>(); + ApiKeyBoolQueryBuilder apiKeyMatchQueryBuilder = ApiKeyBoolQueryBuilder.build(simpleQueryStringBuilder, queryFields::add, null); + List mustQueries = apiKeyMatchQueryBuilder.must(); + assertThat(mustQueries, hasSize(1)); + assertThat(mustQueries.get(0), instanceOf(SimpleQueryStringBuilder.class)); + SimpleQueryStringBuilder simpleQueryStringBuilder2 = (SimpleQueryStringBuilder) mustQueries.get(0); + assertThat( + simpleQueryStringBuilder2.fields().keySet(), + containsInAnyOrder( + "creation_time", + "invalidation_time", + "expiration_time", + "api_key_invalidated", + "creator.principal", + "creator.realm", + "metadata_flattened", + "name", + "runtime_key_type" + ) + ); + assertThat(simpleQueryStringBuilder2.lenient(), is(true)); + assertThat( + queryFields, + containsInAnyOrder( + "doc_type", + "creation_time", + "invalidation_time", + "expiration_time", + "api_key_invalidated", + "creator.principal", + "creator.realm", + "metadata_flattened", + "name", + "runtime_key_type" + ) + ); + } + + public void testSimpleQueryBuilderPropertiesArePreserved() { + SimpleQueryStringBuilder simpleQueryStringBuilder = QueryBuilders.simpleQueryStringQuery(randomAlphaOfLength(4)); + if (randomBoolean()) { + simpleQueryStringBuilder.boost(Math.abs(randomFloat())); + } + if (randomBoolean()) { + simpleQueryStringBuilder.queryName(randomAlphaOfLengthBetween(0, 4)); + } + if (randomBoolean()) { + simpleQueryStringBuilder.analyzer(randomAlphaOfLength(4)); + } + if (randomBoolean()) { + simpleQueryStringBuilder.defaultOperator(randomFrom(Operator.OR, Operator.AND)); + } + if (randomBoolean()) { + simpleQueryStringBuilder.minimumShouldMatch(randomAlphaOfLength(4)); + } + if (randomBoolean()) { + simpleQueryStringBuilder.analyzeWildcard(randomBoolean()); + } + if (randomBoolean()) { + simpleQueryStringBuilder.autoGenerateSynonymsPhraseQuery(randomBoolean()); + } + if (randomBoolean()) { + simpleQueryStringBuilder.lenient(randomBoolean()); + } + if (randomBoolean()) { + simpleQueryStringBuilder.type(randomFrom(MultiMatchQueryBuilder.Type.values())); + } + if (randomBoolean()) { + simpleQueryStringBuilder.quoteFieldSuffix(randomAlphaOfLength(4)); + } + if (randomBoolean()) { + simpleQueryStringBuilder.fuzzyTranspositions(randomBoolean()); + } + if (randomBoolean()) { + 
simpleQueryStringBuilder.fuzzyMaxExpansions(randomIntBetween(1, 10)); + } + if (randomBoolean()) { + simpleQueryStringBuilder.fuzzyPrefixLength(randomIntBetween(1, 10)); + } + if (randomBoolean()) { + simpleQueryStringBuilder.flags( + randomSubsetOf(randomIntBetween(0, 3), SimpleQueryStringFlag.values()).toArray(new SimpleQueryStringFlag[0]) + ); + } + // at least one field for this test + int nFields = randomIntBetween(1, 4); + for (int i = 0; i < nFields; i++) { + simpleQueryStringBuilder.field(randomValidFieldName(), Math.abs(randomFloat())); + } + List queryFields = new ArrayList<>(); + ApiKeyBoolQueryBuilder apiKeyMatchQueryBuilder = ApiKeyBoolQueryBuilder.build( + simpleQueryStringBuilder, + queryFields::add, + randomFrom( + AuthenticationTests.randomApiKeyAuthentication(AuthenticationTests.randomUser(), randomUUID()), + AuthenticationTests.randomAuthentication(null, null), + null + ) + ); + List mustQueries = apiKeyMatchQueryBuilder.must(); + assertThat(mustQueries, hasSize(1)); + assertThat(mustQueries.get(0), instanceOf(SimpleQueryStringBuilder.class)); + SimpleQueryStringBuilder simpleQueryStringBuilder2 = (SimpleQueryStringBuilder) mustQueries.get(0); + assertThat(simpleQueryStringBuilder2.value(), is(simpleQueryStringBuilder.value())); + assertThat(simpleQueryStringBuilder2.boost(), is(simpleQueryStringBuilder.boost())); + assertThat(simpleQueryStringBuilder2.queryName(), is(simpleQueryStringBuilder.queryName())); + assertThat(simpleQueryStringBuilder2.fields().size(), is(simpleQueryStringBuilder.fields().size())); + for (Map.Entry fieldEntry : simpleQueryStringBuilder.fields().entrySet()) { + assertThat( + simpleQueryStringBuilder2.fields().get(ApiKeyFieldNameTranslators.translate(fieldEntry.getKey())), + is(fieldEntry.getValue()) + ); + } + for (String field : simpleQueryStringBuilder2.fields().keySet()) { + assertThat(queryFields, hasItem(field)); + } + assertThat(simpleQueryStringBuilder2.analyzer(), is(simpleQueryStringBuilder.analyzer())); + assertThat(simpleQueryStringBuilder2.defaultOperator(), is(simpleQueryStringBuilder.defaultOperator())); + assertThat(simpleQueryStringBuilder2.minimumShouldMatch(), is(simpleQueryStringBuilder.minimumShouldMatch())); + assertThat(simpleQueryStringBuilder2.analyzeWildcard(), is(simpleQueryStringBuilder.analyzeWildcard())); + assertThat( + simpleQueryStringBuilder2.autoGenerateSynonymsPhraseQuery(), + is(simpleQueryStringBuilder.autoGenerateSynonymsPhraseQuery()) + ); + assertThat(simpleQueryStringBuilder2.lenient(), is(simpleQueryStringBuilder.lenient())); + assertThat(simpleQueryStringBuilder2.type(), is(simpleQueryStringBuilder.type())); + assertThat(simpleQueryStringBuilder2.quoteFieldSuffix(), is(simpleQueryStringBuilder.quoteFieldSuffix())); + assertThat(simpleQueryStringBuilder2.fuzzyTranspositions(), is(simpleQueryStringBuilder.fuzzyTranspositions())); + assertThat(simpleQueryStringBuilder2.fuzzyMaxExpansions(), is(simpleQueryStringBuilder.fuzzyMaxExpansions())); + assertThat(simpleQueryStringBuilder2.fuzzyPrefixLength(), is(simpleQueryStringBuilder.fuzzyPrefixLength())); + assertThat(simpleQueryStringBuilder2.flags(), is(simpleQueryStringBuilder.flags())); + } + public void testMatchQueryBuilderPropertiesArePreserved() { // the match query has many properties, that all must be preserved after limiting for API Key docs only Authentication authentication = randomFrom( @@ -151,6 +303,9 @@ public void testMatchQueryBuilderPropertiesArePreserved() { if (randomBoolean()) { matchQueryBuilder.boost(Math.abs(randomFloat())); } + if 
(randomBoolean()) { + matchQueryBuilder.queryName(randomAlphaOfLengthBetween(0, 4)); + } if (randomBoolean()) { matchQueryBuilder.operator(randomFrom(Operator.OR, Operator.AND)); } @@ -205,6 +360,7 @@ public void testMatchQueryBuilderPropertiesArePreserved() { assertThat(matchQueryBuilder2.lenient(), is(matchQueryBuilder.lenient())); assertThat(matchQueryBuilder2.autoGenerateSynonymsPhraseQuery(), is(matchQueryBuilder.autoGenerateSynonymsPhraseQuery())); assertThat(matchQueryBuilder2.boost(), is(matchQueryBuilder.boost())); + assertThat(matchQueryBuilder2.queryName(), is(matchQueryBuilder.queryName())); } public void testQueryForDomainAuthentication() { @@ -925,20 +1081,27 @@ private void assertCommonFilterQueries(ApiKeyBoolQueryBuilder qb, Authentication private QueryBuilder randomSimpleQuery(String fieldName) { return switch (randomIntBetween(0, 9)) { - case 0 -> QueryBuilders.termQuery(fieldName, randomAlphaOfLengthBetween(3, 8)); - case 1 -> QueryBuilders.termsQuery(fieldName, randomArray(1, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8))); + case 0 -> QueryBuilders.termQuery(fieldName, randomAlphaOfLengthBetween(3, 8)) + .boost(Math.abs(randomFloat())) + .queryName(randomAlphaOfLength(4)); + case 1 -> QueryBuilders.termsQuery(fieldName, randomArray(1, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8))) + .boost(Math.abs(randomFloat())) + .queryName(randomAlphaOfLength(4)); case 2 -> QueryBuilders.idsQuery().addIds(randomArray(1, 3, String[]::new, () -> randomAlphaOfLength(22))); case 3 -> QueryBuilders.prefixQuery(fieldName, "prod-"); case 4 -> QueryBuilders.wildcardQuery(fieldName, "prod-*-east-*"); case 5 -> QueryBuilders.matchAllQuery(); - case 6 -> QueryBuilders.existsQuery(fieldName); + case 6 -> QueryBuilders.existsQuery(fieldName).boost(Math.abs(randomFloat())).queryName(randomAlphaOfLength(4)); case 7 -> QueryBuilders.rangeQuery(fieldName) .from(Instant.now().minus(1, ChronoUnit.DAYS).toEpochMilli(), randomBoolean()) .to(Instant.now().toEpochMilli(), randomBoolean()); case 8 -> QueryBuilders.simpleQueryStringQuery("+rest key*") .field(fieldName) .lenient(randomBoolean()) - .analyzeWildcard(randomBoolean()); + .analyzeWildcard(randomBoolean()) + .fuzzyPrefixLength(randomIntBetween(1, 10)) + .fuzzyMaxExpansions(randomIntBetween(1, 10)) + .fuzzyTranspositions(randomBoolean()); case 9 -> QueryBuilders.matchQuery(fieldName, randomAlphaOfLengthBetween(3, 8)) .operator(randomFrom(Operator.OR, Operator.AND)) .lenient(randomBoolean()) From 6cf92584ba6e635449109cbd784b63e920fbebca Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Tue, 6 Feb 2024 22:31:35 +1100 Subject: [PATCH 058/106] [Docs] Minor tweak for balance settings docs (#105170) Relates: #105119 --- .../modules/cluster/shards_allocation.asciidoc | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/docs/reference/modules/cluster/shards_allocation.asciidoc b/docs/reference/modules/cluster/shards_allocation.asciidoc index a73a3906bd3fd..1e425c77d1264 100644 --- a/docs/reference/modules/cluster/shards_allocation.asciidoc +++ b/docs/reference/modules/cluster/shards_allocation.asciidoc @@ -55,11 +55,12 @@ one of the active allocation ids in the cluster state. (<>) A shortcut to set both `cluster.routing.allocation.node_concurrent_incoming_recoveries` and - `cluster.routing.allocation.node_concurrent_outgoing_recoveries`. Defaults - to `2`. 
Increasing this setting may cause shard movements to have a - performance impact on other activity in your cluster, but may not make - shard movements complete noticeably sooner. We do not recommend adjusting - this setting from its default of `2`. + `cluster.routing.allocation.node_concurrent_outgoing_recoveries`. The + value of this setting takes effect only when the more specific setting is + not configured. Defaults to `2`. Increasing this setting may cause shard + movements to have a performance impact on other activity in your cluster, + but may not make shard movements complete noticeably sooner. We do not + recommend adjusting this setting from its default of `2`. `cluster.routing.allocation.node_initial_primaries_recoveries`:: (<>) From bfa21b511dc8dbf8da7c9d0c573142af80fae88e Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Tue, 6 Feb 2024 13:37:03 +0200 Subject: [PATCH 059/106] For empty mappings use a LocalRelation (#105081) Fixes https://github.com/elastic/elasticsearch/issues/104809 by converting a plan to a local relation when there is no mapping for the index pattern. --- docs/changelog/105081.yaml | 6 ++ .../src/main/resources/stats.csv-spec | 3 +- .../xpack/esql/action/EsqlActionIT.java | 84 +++++++++++++++++++ .../xpack/esql/analysis/Analyzer.java | 6 +- .../esql/optimizer/LogicalPlanOptimizer.java | 11 ++- .../LocalLogicalPlanOptimizerTests.java | 5 +- .../LocalPhysicalPlanOptimizerTests.java | 3 +- .../optimizer/LogicalPlanOptimizerTests.java | 43 ++++++++-- .../optimizer/PhysicalPlanOptimizerTests.java | 4 +- .../xpack/esql/planner/FilterTests.java | 3 +- .../esql/plugin/DataNodeRequestTests.java | 3 +- 11 files changed, 152 insertions(+), 19 deletions(-) create mode 100644 docs/changelog/105081.yaml diff --git a/docs/changelog/105081.yaml b/docs/changelog/105081.yaml new file mode 100644 index 0000000000000..efa686bd7b4a4 --- /dev/null +++ b/docs/changelog/105081.yaml @@ -0,0 +1,6 @@ +pr: 105081 +summary: For empty mappings use a `LocalRelation` +area: ES|QL +type: bug +issues: + - 104809 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index fbb38df87ed75..22d0f8c307c12 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -1139,12 +1139,13 @@ FROM employees | STATS x = CONCAT(TO_STRING(ROUND(AVG(salary % 3))), TO_STRING(MAX(emp_no))), y = ROUND((MIN(emp_no / 3) + PI() - MEDIAN(salary))/E()) BY z = languages % 2 +| SORT z ; x:s | y:d | z:i -1.010029 | -16452.0 | null 1.010100 | -15260.0 | 0 1.010097 | -16701.0 | 1 +1.010029 | -16452.0 | null ; nestedAggsOverGroupingWithAlias#[skip:-8.12.99,reason:supported in 8.13] diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 04e46d8ff5425..c945db951f964 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -10,6 +10,7 @@ import org.elasticsearch.Build; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import 
org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -1413,6 +1414,89 @@ public void testCountTextField() { } } + public void testQueryOnEmptyMappingIndex() { + createIndex("empty-test", Settings.EMPTY); + createIndex("empty-test2", Settings.EMPTY); + IndicesAliasesRequestBuilder indicesAliasesRequestBuilder = indicesAdmin().prepareAliases() + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("empty-test").alias("alias-test")) + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("empty-test2").alias("alias-test")); + indicesAdmin().aliases(indicesAliasesRequestBuilder.request()).actionGet(); + + String[] indexPatterns = new String[] { "empty-test", "empty-test,empty-test2", "empty-test*", "alias-test", "*-test*" }; + String from = "FROM " + randomFrom(indexPatterns) + " "; + + assertEmptyIndexQueries(from); + + try (EsqlQueryResponse resp = run(from + "[METADATA _source] | EVAL x = 123")) { + assertFalse(resp.values().hasNext()); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("_source", "_source"), new ColumnInfo("x", "integer")))); + } + + try (EsqlQueryResponse resp = run(from)) { + assertFalse(resp.values().hasNext()); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("", "null")))); + } + } + + public void testQueryOnEmptyDataIndex() { + createIndex("empty_data-test", Settings.EMPTY); + assertAcked(client().admin().indices().prepareCreate("empty_data-test2").setMapping("name", "type=text")); + IndicesAliasesRequestBuilder indicesAliasesRequestBuilder = indicesAdmin().prepareAliases() + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("empty_data-test").alias("alias-empty_data-test")) + .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("empty_data-test2").alias("alias-empty_data-test")); + indicesAdmin().aliases(indicesAliasesRequestBuilder.request()).actionGet(); + + String[] indexPatterns = new String[] { + "empty_data-test2", + "empty_data-test,empty_data-test2", + "alias-empty_data-test", + "*data-test" }; + String from = "FROM " + randomFrom(indexPatterns) + " "; + + assertEmptyIndexQueries(from); + + try (EsqlQueryResponse resp = run(from + "[METADATA _source] | EVAL x = 123")) { + assertFalse(resp.values().hasNext()); + assertThat( + resp.columns(), + equalTo(List.of(new ColumnInfo("name", "text"), new ColumnInfo("_source", "_source"), new ColumnInfo("x", "integer"))) + ); + } + + try (EsqlQueryResponse resp = run(from)) { + assertFalse(resp.values().hasNext()); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("name", "text")))); + } + } + + private void assertEmptyIndexQueries(String from) { + try (EsqlQueryResponse resp = run(from + "[METADATA _source] | KEEP _source | LIMIT 1")) { + assertFalse(resp.values().hasNext()); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("_source", "_source")))); + } + + try (EsqlQueryResponse resp = run(from + "| EVAL y = 1 | KEEP y | LIMIT 1 | EVAL x = 1")) { + assertFalse(resp.values().hasNext()); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("y", "integer"), new ColumnInfo("x", "integer")))); + } + + try (EsqlQueryResponse resp = run(from + "| STATS c = count()")) { + assertTrue(resp.values().hasNext()); + Iterator row = resp.values().next(); + assertThat(row.next(), equalTo((long) 0)); + assertThat(resp.columns(), equalTo(List.of(new 
ColumnInfo("c", "long")))); + } + + try (EsqlQueryResponse resp = run(from + "| STATS c = count() | EVAL x = 123")) { + assertTrue(resp.values().hasNext()); + Iterator row = resp.values().next(); + assertThat(row.next(), equalTo((long) 0)); + assertThat(row.next(), equalTo(123)); + assertFalse(row.hasNext()); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("c", "long"), new ColumnInfo("x", "integer")))); + } + } + private void createNestedMappingIndex(String indexName) throws IOException { XContentBuilder builder = JsonXContent.contentBuilder(); builder.startObject(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 5fb56f674f5d7..dfcfc702fd5cd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -83,8 +83,10 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.NESTED; public class Analyzer extends ParameterizedRuleExecutor { - static final List NO_FIELDS = List.of( - new ReferenceAttribute(Source.EMPTY, "", DataTypes.NULL, null, Nullability.TRUE, null, false) + // marker list of attributes for plans that do not have any concrete fields to return, but have other computed columns to return + // ie from test | stats c = count(*) + public static final List NO_FIELDS = List.of( + new ReferenceAttribute(Source.EMPTY, "", DataTypes.NULL, null, Nullability.TRUE, null, true) ); private static final Iterable> rules; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 71595b074afc7..58b5630c21a79 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -152,7 +152,8 @@ protected static List> rules() { // lastly replace surrogate functions new SubstituteSurrogates(), new ReplaceRegexMatch(), - new ReplaceAliasingEvalWithProject() + new ReplaceAliasingEvalWithProject(), + new SkipQueryOnEmptyMappings() // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 ); @@ -704,6 +705,14 @@ protected LogicalPlan rule(UnaryPlan plan) { } } + static class SkipQueryOnEmptyMappings extends OptimizerRules.OptimizerRule { + + @Override + protected LogicalPlan rule(EsRelation plan) { + return plan.index().concreteIndices().isEmpty() ? 
new LocalRelation(plan.source(), plan.output(), LocalSupplier.EMPTY) : plan; + } + } + @SuppressWarnings("removal") static class PropagateEmptyRelation extends OptimizerRules.OptimizerRule { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index 4b01a93b7e709..1b2210dbd5f4e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -39,6 +39,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Set; import static java.util.Collections.emptyMap; import static org.elasticsearch.xpack.esql.EsqlTestUtils.L; @@ -69,7 +70,7 @@ public static void init() { parser = new EsqlParser(); mapping = loadMapping("mapping-basic.json"); - EsIndex test = new EsIndex("test", mapping); + EsIndex test = new EsIndex("test", mapping, Set.of("test")); IndexResolution getIndexResult = IndexResolution.valid(test); logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); @@ -321,7 +322,7 @@ public void testSparseDocument() throws Exception { SearchStats searchStats = statsForExistingField("field000", "field001", "field002", "field003", "field004"); - EsIndex index = new EsIndex("large", large); + EsIndex index = new EsIndex("large", large, Set.of("large")); IndexResolution getIndexResult = IndexResolution.valid(index); var logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 7950cf0f1d335..80cc7d9a52a4b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -58,6 +58,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; import static java.util.Arrays.asList; @@ -143,7 +144,7 @@ public void init() { private Analyzer makeAnalyzer(String mappingFileName, EnrichResolution enrichResolution) { var mapping = loadMapping(mappingFileName); - EsIndex test = new EsIndex("test", mapping); + EsIndex test = new EsIndex("test", mapping, Set.of("test")); IndexResolution getIndexResult = IndexResolution.valid(test); return new Analyzer(new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), new Verifier(new Metrics())); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 06b81d9c4608e..6cbb629a6843a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import 
org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils; +import org.elasticsearch.xpack.esql.analysis.EnrichResolution; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqual; @@ -85,9 +86,11 @@ import java.util.List; import java.util.Map; +import java.util.Set; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.esql.EsqlTestUtils.L; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; @@ -96,6 +99,7 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; import static org.elasticsearch.xpack.esql.EsqlTestUtils.localSource; import static org.elasticsearch.xpack.esql.EsqlTestUtils.withDefaultLimitWarning; +import static org.elasticsearch.xpack.esql.analysis.Analyzer.NO_FIELDS; import static org.elasticsearch.xpack.esql.type.EsqlDataTypes.GEO_POINT; import static org.elasticsearch.xpack.ql.TestUtils.relation; import static org.elasticsearch.xpack.ql.tree.Source.EMPTY; @@ -124,21 +128,17 @@ public class LogicalPlanOptimizerTests extends ESTestCase { private static Map mapping; private static Map mappingAirports; private static Analyzer analyzerAirports; + private static EnrichResolution enrichResolution; @BeforeClass public static void init() { parser = new EsqlParser(); logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); - var enrichResolution = AnalyzerTestUtils.loadEnrichPolicyResolution( - "languages_idx", - "id", - "languages_idx", - "mapping-languages.json" - ); + enrichResolution = AnalyzerTestUtils.loadEnrichPolicyResolution("languages_idx", "id", "languages_idx", "mapping-languages.json"); // Most tests used data from the test index, so we load it here, and use it in the plan() function. mapping = loadMapping("mapping-basic.json"); - EsIndex test = new EsIndex("test", mapping); + EsIndex test = new EsIndex("test", mapping, Set.of("test")); IndexResolution getIndexResult = IndexResolution.valid(test); analyzer = new Analyzer( new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, enrichResolution), @@ -147,7 +147,7 @@ public static void init() { // Some tests use data from the airports index, so we load it here, and use it in the plan_airports() function. 
mappingAirports = loadMapping("mapping-airports.json"); - EsIndex airports = new EsIndex("airports", mappingAirports); + EsIndex airports = new EsIndex("airports", mappingAirports, Set.of("airports")); IndexResolution getIndexResultAirports = IndexResolution.valid(airports); analyzerAirports = new Analyzer( new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResultAirports, enrichResolution), @@ -3182,6 +3182,33 @@ public void testStatsWithCanonicalAggregate() throws Exception { assertThat(Expressions.attribute(fields.get(1)), is(Expressions.attribute(sum_argument))); } + public void testEmptyMappingIndex() { + EsIndex empty = new EsIndex("empty_test", emptyMap(), emptySet()); + IndexResolution getIndexResultAirports = IndexResolution.valid(empty); + var analyzer = new Analyzer( + new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResultAirports, enrichResolution), + TEST_VERIFIER + ); + + var plan = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement("from empty_test"))); + as(plan, LocalRelation.class); + assertThat(plan.output(), equalTo(NO_FIELDS)); + + plan = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement("from empty_test [metadata _id] | eval x = 1"))); + as(plan, LocalRelation.class); + assertThat(Expressions.names(plan.output()), contains("_id", "x")); + + plan = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement("from empty_test [metadata _id, _version] | limit 5"))); + as(plan, LocalRelation.class); + assertThat(Expressions.names(plan.output()), contains("_id", "_version")); + + plan = logicalOptimizer.optimize( + analyzer.analyze(parser.createStatement("from empty_test | eval x = \"abc\" | enrich languages_idx on x")) + ); + LocalRelation local = as(plan, LocalRelation.class); + assertThat(Expressions.names(local.output()), contains(NO_FIELDS.get(0).name(), "x", "language_code", "language_name")); + } + private LogicalPlan optimizedPlan(String query) { return plan(query); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 74b640a723d3f..066d6ef5afc04 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -176,7 +176,7 @@ public void init() { mapper = new Mapper(functionRegistry); // Most tests used data from the test index, so we load it here, and use it in the plan() function. mapping = loadMapping("mapping-basic.json"); - EsIndex test = new EsIndex("test", mapping); + EsIndex test = new EsIndex("test", mapping, Set.of("test")); IndexResolution getIndexResult = IndexResolution.valid(test); var enrichResolution = setupEnrichResolution(); analyzer = new Analyzer(new AnalyzerContext(config, functionRegistry, getIndexResult, enrichResolution), TEST_VERIFIER); @@ -194,7 +194,7 @@ public void init() { // Some tests use data from the airports index, so we load it here, and use it in the plan_airports() function. 
mappingAirports = loadMapping("mapping-airports.json"); - EsIndex airports = new EsIndex("airports", mappingAirports); + EsIndex airports = new EsIndex("airports", mappingAirports, Set.of("airports")); IndexResolution getIndexResultAirports = IndexResolution.valid(airports); analyzerAirports = new Analyzer( new AnalyzerContext(config, functionRegistry, getIndexResultAirports, enrichResolution), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java index 52620a0a55af7..eef109cb2830e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java @@ -41,6 +41,7 @@ import java.io.UncheckedIOException; import java.util.List; import java.util.Map; +import java.util.Set; import static java.util.Arrays.asList; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; @@ -72,7 +73,7 @@ public static void init() { parser = new EsqlParser(); mapping = loadMapping("mapping-basic.json"); - EsIndex test = new EsIndex("test", mapping); + EsIndex test = new EsIndex("test", mapping, Set.of("test")); IndexResolution getIndexResult = IndexResolution.valid(test); logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(EsqlTestUtils.TEST_CFG)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java index f1701ed696d2c..4e6c3a545da06 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java @@ -37,6 +37,7 @@ import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.Set; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_CFG; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; @@ -208,7 +209,7 @@ protected DataNodeRequest mutateInstance(DataNodeRequest in) throws IOException static LogicalPlan parse(String query) { Map mapping = loadMapping("mapping-basic.json"); - EsIndex test = new EsIndex("test", mapping); + EsIndex test = new EsIndex("test", mapping, Set.of("test")); IndexResolution getIndexResult = IndexResolution.valid(test); var logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(TEST_CFG)); var analyzer = new Analyzer( From ff0f83f59d5eaa9214a1f4a819ad031985925e51 Mon Sep 17 00:00:00 2001 From: Felix Barnsteiner Date: Tue, 6 Feb 2024 12:58:42 +0100 Subject: [PATCH 060/106] Make field limit more predictable (#102885) Today, we're counting all mappers towards the field limit, including mappers for subfields that aren't explicitly added to the mapping. This means that some field types, such as `search_as_you_type` or `percolator`, count as more than one field even though that's not apparent to users, who just define them as a single field in the mapping. This change makes it so that each field mapper only counts as one. We're still counting multi-fields. This makes it easier for users to understand why the field limit is hit.
~In addition to that, it also simplifies https://github.com/elastic/elasticsearch/pull/96235 as it makes the implementation of `Mapper.Builder#getTotalFieldsCount` much simpler and easier to align with `Mapper#getTotalFieldsCount`. This reduces the risk of over- or under-estimating the field count of a `Mapper.Builder` in `DocumentParserContext#addDynamicMapper`, which in turn reduces the risk of data loss due to the issue described here: https://github.com/elastic/elasticsearch/pull/96235#discussion_r1402495749.~ *Edit: due to https://github.com/elastic/elasticsearch/pull/103865, we don't need an implementation of `getTotalFieldsCount` or `mapperSize` in `Mapper.Builder`. Still, this PR more closely aligns `Mapper#getTotalFieldsCount` with `MappingLookup#getTotalFieldsCount`, which `DocumentParserContext#addDynamicMapper` uses to determine whether the field limit is hit.* A potential risk of this change is that we're now effectively allowing more fields in the mapping, which may surprise users. That said, I wouldn't expect negative consequences from it; generally, I'd expect users to be happy about any change that reduces the risk of data loss. We could also think about applying the new counting logic only to new indices (depending on the `IndexVersion`). However, that would add more complexity, and I'm not convinced of the value: we'd then need to maintain two different ways of counting fields and also pass the `IndexVersion` to `MappingLookup`, which previously didn't require it. This PR is meant as a conversation starter. It would also simplify https://github.com/elastic/elasticsearch/pull/96235, but I don't think it blocks that PR in any way. I'm curious about the opinion of @javanna and @jpountz on this.
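To make the new counting rule concrete, here is a minimal, self-contained sketch. The classes are hypothetical stand-ins for the `Mapper` hierarchy, not the real implementation: a field contributes one towards the limit plus one for each explicitly defined multi-field, while internal sub-fields such as the ones `search_as_you_type` generates no longer contribute.

```java
import java.util.List;

// Hypothetical stand-in for a field mapper: only the field itself and its
// explicitly defined multi-fields count towards the field limit.
record SketchField(String name, List<SketchField> multiFields) {
    int totalFieldsCount() {
        // one for the field itself, plus its multi-fields, recursively
        return 1 + multiFields.stream().mapToInt(SketchField::totalFieldsCount).sum();
    }
}

class FieldCountDemo {
    public static void main(String[] args) {
        // "name" with a "name.raw" multi-field counts as 2; a search_as_you_type
        // style field with no multi-fields now counts as 1, regardless of the
        // internal sub-fields it creates under the hood.
        var name = new SketchField("name", List.of(new SketchField("name.raw", List.of())));
        var title = new SketchField("title", List.of());
        System.out.println(name.totalFieldsCount() + title.totalFieldsCount()); // prints 3
    }
}
```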
--- docs/changelog/102885.yaml | 5 +++ .../LegacyGeoShapeFieldMapperTests.java | 6 +++ .../org/elasticsearch/index/IndexService.java | 2 +- .../index/mapper/DocumentParserContext.java | 2 +- .../index/mapper/FieldAliasMapper.java | 5 +++ .../index/mapper/FieldMapper.java | 8 +++- .../elasticsearch/index/mapper/Mapper.java | 14 ++----- .../index/mapper/MappingLookup.java | 12 +++++- .../index/mapper/ObjectMapper.java | 9 ++++- .../index/mapper/RootObjectMapper.java | 8 +--- .../index/mapper/FieldAliasMapperTests.java | 2 +- .../index/mapper/ObjectMapperMergeTests.java | 40 +++++++++---------- .../index/mapper/ObjectMapperTests.java | 11 +++-- .../index/mapper/RootObjectMapperTests.java | 2 +- .../index/mapper/MapperTestCase.java | 5 +++ 15 files changed, 83 insertions(+), 48 deletions(-) create mode 100644 docs/changelog/102885.yaml diff --git a/docs/changelog/102885.yaml b/docs/changelog/102885.yaml new file mode 100644 index 0000000000000..7a998c3eb1f66 --- /dev/null +++ b/docs/changelog/102885.yaml @@ -0,0 +1,5 @@ +pr: 102885 +summary: Make field limit more predictable +area: Mapping +type: enhancement +issues: [] diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java index 91a94fe174c21..0a0bb12bedbae 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java @@ -76,6 +76,12 @@ protected boolean supportsStoredFields() { return false; } + @Override + public void testTotalFieldsCount() throws IOException { + super.testTotalFieldsCount(); + assertWarnings("Parameter [strategy] is deprecated and will be removed in a future version"); + } + @Override protected void registerParameters(ParameterChecker checker) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index c5a5e5a5c4b96..f534d8b2dc806 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -330,7 +330,7 @@ public NodeMappingStats getNodeMappingStats() { if (mapperService == null) { return null; } - long totalCount = mapperService().mappingLookup().getTotalFieldsCount(); + long totalCount = mapperService().mappingLookup().getTotalMapperCount(); long totalEstimatedOverhead = totalCount * 1024L; // 1KiB estimated per mapping NodeMappingStats indexNodeMappingStats = new NodeMappingStats(totalCount, totalEstimatedOverhead); return indexNodeMappingStats; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index 0a669fb0ade8a..66c5de61bcd92 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -335,7 +335,7 @@ public final boolean addDynamicMapper(Mapper mapper) { if (mappingLookup.getMapper(mapper.name()) == null && mappingLookup.objectMappers().containsKey(mapper.name()) == false && dynamicMappers.containsKey(mapper.name()) == false) { - int mapperSize = mapper.mapperSize(); + int mapperSize = mapper.getTotalFieldsCount(); int additionalFieldsToAdd = 
getNewFieldsSize() + mapperSize; if (indexSettings().isIgnoreDynamicFieldsBeyondLimit()) { if (mappingLookup.exceedsLimit(indexSettings().getMappingTotalFieldsLimit(), additionalFieldsToAdd)) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java index c24ff9bb9c277..97d1b9368a6c9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldAliasMapper.java @@ -113,6 +113,11 @@ public void validate(MappingLookup mappers) { } } + @Override + public int getTotalFieldsCount() { + return 1; + } + public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String name, Map node, MappingParserContext parserContext) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 9ed23f61bf0ea..75d9fed2a4d4b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -51,6 +51,7 @@ import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; +import java.util.stream.Stream; import static org.elasticsearch.core.Strings.format; @@ -428,6 +429,11 @@ protected void doXContentBody(XContentBuilder builder, Params params) throws IOE protected abstract String contentType(); + @Override + public int getTotalFieldsCount() { + return 1 + Stream.of(multiFields.mappers).mapToInt(FieldMapper::getTotalFieldsCount).sum(); + } + public Map indexAnalyzers() { return Map.of(); } @@ -455,7 +461,7 @@ private void add(FieldMapper mapper) { private void update(FieldMapper toMerge, MapperMergeContext context) { if (mapperBuilders.containsKey(toMerge.simpleName()) == false) { - if (context.decrementFieldBudgetIfPossible(toMerge.mapperSize())) { + if (context.decrementFieldBudgetIfPossible(toMerge.getTotalFieldsCount())) { add(toMerge); } } else { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java index ca15248c037bc..397f99f63030c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -137,16 +137,8 @@ public static FieldType freezeAndDeduplicateFieldType(FieldType fieldType) { } /** - * Returns the size this mapper counts against the {@linkplain MapperService#INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING field limit}. - *

- * Needs to be in sync with {@link MappingLookup#getTotalFieldsCount()}. + * The total number of fields as defined in the mapping. + * Defines how this mapper counts towards {@link MapperService#INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING}. */ - public int mapperSize() { - int size = 1; - for (Mapper mapper : this) { - size += mapper.mapperSize(); - } - return size; - } - + public abstract int getTotalFieldsCount(); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java index ea59d6640f647..0ae13241b7f56 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java @@ -55,6 +55,7 @@ private CacheKey() {} private final List indexTimeScriptMappers; private final Mapping mapping; private final Set completionFields; + private final int totalFieldsCount; /** * Creates a new {@link MappingLookup} instance by parsing the provided mapping and extracting its field definitions. @@ -127,6 +128,7 @@ private MappingLookup( Collection objectMappers, Collection aliasMappers ) { + this.totalFieldsCount = mapping.getRoot().getTotalFieldsCount(); this.mapping = mapping; Map fieldMappers = new HashMap<>(); Map objects = new HashMap<>(); @@ -223,6 +225,14 @@ FieldTypeLookup fieldTypesLookup() { * Returns the total number of fields defined in the mappings, including field mappers, object mappers as well as runtime fields. */ public long getTotalFieldsCount() { + return totalFieldsCount; + } + + /** + * Returns the total number of mappers defined in the mappings, including field mappers and their sub-fields + * (which are not explicitly defined in the mappings), multi-fields, object mappers, runtime fields and metadata field mappers. 
+ */ + public long getTotalMapperCount() { return fieldMappers.size() + objectMappers.size() + runtimeFieldMappersCount; } @@ -286,7 +296,7 @@ boolean exceedsLimit(long limit, int additionalFieldsToAdd) { } long remainingFieldsUntilLimit(long mappingTotalFieldsLimit) { - return mappingTotalFieldsLimit - getTotalFieldsCount() + mapping.getSortedMetadataMappers().length; + return mappingTotalFieldsLimit - totalFieldsCount; } private void checkDimensionFieldLimit(long limit) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 9d7353859ed25..0bce02564ef34 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -182,6 +182,11 @@ public ObjectMapper build(MapperBuilderContext context) { } } + @Override + public int getTotalFieldsCount() { + return 1 + mappers.values().stream().mapToInt(Mapper::getTotalFieldsCount).sum(); + } + public static class TypeParser implements Mapper.TypeParser { @Override public boolean supportsVersion(IndexVersion indexCreatedVersion) { @@ -547,7 +552,7 @@ private static Map buildMergedMappers( Mapper mergeIntoMapper = mergedMappers.get(mergeWithMapper.simpleName()); Mapper merged = null; if (mergeIntoMapper == null) { - if (objectMergeContext.decrementFieldBudgetIfPossible(mergeWithMapper.mapperSize())) { + if (objectMergeContext.decrementFieldBudgetIfPossible(mergeWithMapper.getTotalFieldsCount())) { merged = mergeWithMapper; } else if (mergeWithMapper instanceof ObjectMapper om) { merged = truncateObjectMapper(reason, objectMergeContext, om); @@ -581,7 +586,7 @@ private static ObjectMapper truncateObjectMapper(MergeReason reason, MapperMerge // there's not enough capacity for the whole object mapper, // so we're just trying to add the shallow object, without it's sub-fields ObjectMapper shallowObjectMapper = objectMapper.withoutMappers(); - if (context.decrementFieldBudgetIfPossible(shallowObjectMapper.mapperSize())) { + if (context.decrementFieldBudgetIfPossible(shallowObjectMapper.getTotalFieldsCount())) { // now trying to add the sub-fields one by one via a merge, until we hit the limit return shallowObjectMapper.merge(objectMapper, reason, context); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index 7994c018f40f2..2fe8c49df2175 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -576,11 +576,7 @@ private static boolean processField( } @Override - public int mapperSize() { - int size = runtimeFields().size(); - for (Mapper mapper : this) { - size += mapper.mapperSize(); - } - return size; + public int getTotalFieldsCount() { + return mappers.values().stream().mapToInt(Mapper::getTotalFieldsCount).sum() + runtimeFields.size(); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java index f816f403be89f..c20091a308ed8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldAliasMapperTests.java @@ -34,7 +34,7 @@ public void testParsing() throws IOException { ); DocumentMapper mapper = 
createDocumentMapper(mapping); assertEquals(mapping, mapper.mappingSource().toString()); - assertEquals(2, mapper.mapping().getRoot().mapperSize()); + assertEquals(2, mapper.mapping().getRoot().getTotalFieldsCount()); } public void testParsingWithMissingPath() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java index 0737dcb7cb5d2..005b14886d059 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperMergeTests.java @@ -213,10 +213,10 @@ public void testMergeWithLimit() { final ObjectMapper mergedAdd1 = rootObjectMapper.merge(mergeWith, MapperMergeContext.root(false, false, 1)); // THEN "baz" new field is added to merged mapping - assertEquals(3, rootObjectMapper.mapperSize()); - assertEquals(4, mergeWith.mapperSize()); - assertEquals(3, mergedAdd0.mapperSize()); - assertEquals(4, mergedAdd1.mapperSize()); + assertEquals(3, rootObjectMapper.getTotalFieldsCount()); + assertEquals(4, mergeWith.getTotalFieldsCount()); + assertEquals(3, mergedAdd0.getTotalFieldsCount()); + assertEquals(4, mergedAdd1.getTotalFieldsCount()); } public void testMergeWithLimitTruncatedObjectField() { @@ -231,11 +231,11 @@ public void testMergeWithLimitTruncatedObjectField() { ObjectMapper mergedAdd1 = root.merge(mergeWith, MapperMergeContext.root(false, false, 1)); ObjectMapper mergedAdd2 = root.merge(mergeWith, MapperMergeContext.root(false, false, 2)); ObjectMapper mergedAdd3 = root.merge(mergeWith, MapperMergeContext.root(false, false, 3)); - assertEquals(0, root.mapperSize()); - assertEquals(0, mergedAdd0.mapperSize()); - assertEquals(1, mergedAdd1.mapperSize()); - assertEquals(2, mergedAdd2.mapperSize()); - assertEquals(3, mergedAdd3.mapperSize()); + assertEquals(0, root.getTotalFieldsCount()); + assertEquals(0, mergedAdd0.getTotalFieldsCount()); + assertEquals(1, mergedAdd1.getTotalFieldsCount()); + assertEquals(2, mergedAdd2.getTotalFieldsCount()); + assertEquals(3, mergedAdd3.getTotalFieldsCount()); ObjectMapper parent1 = (ObjectMapper) mergedAdd1.getMapper("parent"); assertNull(parent1.getMapper("child1")); @@ -262,9 +262,9 @@ public void testMergeSameObjectDifferentFields() { ObjectMapper mergedAdd0 = root.merge(mergeWith, MapperMergeContext.root(false, false, 0)); ObjectMapper mergedAdd1 = root.merge(mergeWith, MapperMergeContext.root(false, false, 1)); - assertEquals(2, root.mapperSize()); - assertEquals(2, mergedAdd0.mapperSize()); - assertEquals(3, mergedAdd1.mapperSize()); + assertEquals(2, root.getTotalFieldsCount()); + assertEquals(2, mergedAdd0.getTotalFieldsCount()); + assertEquals(3, mergedAdd1.getTotalFieldsCount()); ObjectMapper parent0 = (ObjectMapper) mergedAdd0.getMapper("parent"); assertNotNull(parent0.getMapper("child1")); @@ -285,13 +285,13 @@ public void testMergeWithLimitMultiField() { createTextKeywordMultiField("text", "keyword2") ).build(MapperBuilderContext.root(false, false)); - assertEquals(2, mergeInto.mapperSize()); - assertEquals(2, mergeWith.mapperSize()); + assertEquals(2, mergeInto.getTotalFieldsCount()); + assertEquals(2, mergeWith.getTotalFieldsCount()); ObjectMapper mergedAdd0 = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, 0)); ObjectMapper mergedAdd1 = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, 1)); - assertEquals(2, mergedAdd0.mapperSize()); - assertEquals(3, mergedAdd1.mapperSize()); + 
assertEquals(2, mergedAdd0.getTotalFieldsCount()); + assertEquals(3, mergedAdd1.getTotalFieldsCount()); } public void testMergeWithLimitRuntimeField() { @@ -302,13 +302,13 @@ public void testMergeWithLimitRuntimeField() { new TestRuntimeField("existing_runtime_field", "keyword") ).addRuntimeField(new TestRuntimeField("new_runtime_field", "keyword")).build(MapperBuilderContext.root(false, false)); - assertEquals(3, mergeInto.mapperSize()); - assertEquals(2, mergeWith.mapperSize()); + assertEquals(3, mergeInto.getTotalFieldsCount()); + assertEquals(2, mergeWith.getTotalFieldsCount()); ObjectMapper mergedAdd0 = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, 0)); ObjectMapper mergedAdd1 = mergeInto.merge(mergeWith, MapperMergeContext.root(false, false, 1)); - assertEquals(3, mergedAdd0.mapperSize()); - assertEquals(4, mergedAdd1.mapperSize()); + assertEquals(3, mergedAdd0.getTotalFieldsCount()); + assertEquals(4, mergedAdd1.getTotalFieldsCount()); } private static RootObjectMapper createRootSubobjectFalseLeafWithDots() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index cbb0929b813fc..29e5f8540734b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -530,15 +530,20 @@ public void testSyntheticSourceDocValuesFieldWithout() throws IOException { assertThat(mapper.mapping().getRoot().syntheticFieldLoader().docValuesLoader(null, null), nullValue()); } - public void testNestedObjectWithMultiFieldsMapperSize() throws IOException { + public void testNestedObjectWithMultiFieldsgetTotalFieldsCount() { ObjectMapper.Builder mapperBuilder = new ObjectMapper.Builder("parent_size_1", Explicit.IMPLICIT_TRUE).add( new ObjectMapper.Builder("child_size_2", Explicit.IMPLICIT_TRUE).add( new TextFieldMapper.Builder("grand_child_size_3", createDefaultIndexAnalyzers()).addMultiField( new KeywordFieldMapper.Builder("multi_field_size_4", IndexVersion.current()) - ).addMultiField(new KeywordFieldMapper.Builder("multi_field_size_5", IndexVersion.current())) + ) + .addMultiField( + new TextFieldMapper.Builder("grand_child_size_5", createDefaultIndexAnalyzers()).addMultiField( + new KeywordFieldMapper.Builder("multi_field_of_multi_field_size_6", IndexVersion.current()) + ) + ) ) ); - assertThat(mapperBuilder.build(MapperBuilderContext.root(false, false)).mapperSize(), equalTo(5)); + assertThat(mapperBuilder.build(MapperBuilderContext.root(false, false)).getTotalFieldsCount(), equalTo(6)); } public void testWithoutMappers() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java index 662a809e6d065..b616aa70dafde 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RootObjectMapperTests.java @@ -166,7 +166,7 @@ public void testRuntimeSection() throws IOException { })); MapperService mapperService = createMapperService(mapping); assertEquals(mapping, mapperService.documentMapper().mappingSource().toString()); - assertEquals(3, mapperService.documentMapper().mapping().getRoot().mapperSize()); + assertEquals(3, mapperService.documentMapper().mapping().getRoot().getTotalFieldsCount()); } public void testRuntimeSectionRejectedUpdate() throws 
IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index 05aee30799de2..43ac8057a3fc0 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -427,6 +427,11 @@ public final void testMinimalToMaximal() throws IOException { assertParseMaximalWarnings(); } + public void testTotalFieldsCount() throws IOException { + MapperService mapperService = createMapperService(fieldMapping(this::minimalMapping)); + assertEquals(1, mapperService.documentMapper().mapping().getRoot().getTotalFieldsCount()); + } + protected final void assertParseMinimalWarnings() { String[] warnings = getParseMinimalWarnings(); if (warnings.length > 0) { From 1f46c4af4ab2981abb709abe65d5016a90cb49e6 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Tue, 6 Feb 2024 13:49:57 +0100 Subject: [PATCH 061/106] [Connector API] Improve user-facing 404 error messages (#105181) --- .../connector/ConnectorIndexService.java | 35 +++++++++++-------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index b321a497ab58d..6392052e9f0b9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -105,6 +105,7 @@ public void createConnectorWithDocId(PutConnectorAction.Request request, ActionL try { isDataIndexNameAlreadyInUse(indexName, connectorId, listener.delegateFailure((l, isIndexNameInUse) -> { if (isIndexNameInUse) { + l.onFailure( new ElasticsearchStatusException( "Index name [" + indexName + "] is used by another connector.", @@ -230,7 +231,7 @@ public void getConnector(String connectorId, ActionListener(connectorId, listener, (l, getResponse) -> { if (getResponse.isExists() == false) { - l.onFailure(new ResourceNotFoundException(connectorId)); + l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } try { @@ -265,7 +266,7 @@ public void deleteConnector(String connectorId, ActionListener l deleteRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, deleteResponse) -> { if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) { - l.onFailure(new ResourceNotFoundException(connectorId)); + l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } l.onResponse(deleteResponse); @@ -392,7 +393,7 @@ public void updateConnectorConfiguration(UpdateConnectorConfigurationAction.Requ updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - l.onFailure(new ResourceNotFoundException(connectorId)); + l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } l.onResponse(updateResponse); @@ -422,7 +423,7 @@ public void updateConnectorError(UpdateConnectorErrorAction.Request request, Act updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { if 
(updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - l.onFailure(new ResourceNotFoundException(connectorId)); + l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } l.onResponse(updateResponse); @@ -453,7 +454,7 @@ public void updateConnectorNameOrDescription(UpdateConnectorNameAction.Request r updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - l.onFailure(new ResourceNotFoundException(connectorId)); + l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } l.onResponse(updateResponse); @@ -483,7 +484,7 @@ public void updateConnectorFiltering(UpdateConnectorFilteringAction.Request requ updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - l.onFailure(new ResourceNotFoundException(connectorId)); + l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } l.onResponse(updateResponse); @@ -512,7 +513,7 @@ public void checkInConnector(String connectorId, ActionListener updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - l.onFailure(new ResourceNotFoundException(connectorId)); + l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } l.onResponse(updateResponse); @@ -542,7 +543,7 @@ public void updateConnectorLastSyncStats(UpdateConnectorLastSyncStatsAction.Requ updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - l.onFailure(new ResourceNotFoundException(connectorId)); + l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } l.onResponse(updateResponse); @@ -582,7 +583,7 @@ public void updateConnectorNative(UpdateConnectorNativeAction.Request request, A updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - l.onFailure(new ResourceNotFoundException(connectorId)); + l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } l.onResponse(updateResponse); @@ -614,7 +615,7 @@ public void updateConnectorPipeline(UpdateConnectorPipelineAction.Request reques updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - l.onFailure(new ResourceNotFoundException(connectorId)); + l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } l.onResponse(updateResponse); @@ -659,7 +660,7 @@ public void updateConnectorIndexName(UpdateConnectorIndexNameAction.Request requ updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (ll, updateResponse) -> { if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - ll.onFailure(new ResourceNotFoundException(connectorId)); + ll.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } ll.onResponse(updateResponse); @@ -691,7 +692,7 @@ public void 
updateConnectorScheduling(UpdateConnectorSchedulingAction.Request re updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - l.onFailure(new ResourceNotFoundException(connectorId)); + l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } l.onResponse(updateResponse); @@ -736,7 +737,7 @@ public void updateConnectorServiceType(UpdateConnectorServiceTypeAction.Request updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (updateListener, updateResponse) -> { if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - updateListener.onFailure(new ResourceNotFoundException(connectorId)); + updateListener.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } updateListener.onResponse(updateResponse); @@ -779,7 +780,7 @@ public void updateConnectorStatus(UpdateConnectorStatusAction.Request request, A updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (updateListener, updateResponse) -> { if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - updateListener.onFailure(new ResourceNotFoundException(connectorId)); + updateListener.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } updateListener.onResponse(updateResponse); @@ -807,7 +808,7 @@ public void updateConnectorApiKeyIdOrApiKeySecretId( updateRequest, new DelegatingIndexNotFoundActionListener<>(connectorId, listener, (l, updateResponse) -> { if (updateResponse.getResult() == UpdateResponse.Result.NOT_FOUND) { - l.onFailure(new ResourceNotFoundException(connectorId)); + l.onFailure(new ResourceNotFoundException(connectorNotFoundErrorMsg(connectorId))); return; } l.onResponse(updateResponse); @@ -882,6 +883,10 @@ public void onFailure(Exception e) { } } + private String connectorNotFoundErrorMsg(String connectorId) { + return "connector [" + connectorId + "] not found"; + } + public record ConnectorResult(List connectors, long totalResults) {} /** From 4cf89428d7f6fcc27cfe64e9adbcc90f54d308f6 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 6 Feb 2024 14:19:52 +0100 Subject: [PATCH 062/106] Use pooled search response sources in EQL (#105179) Remove all usage of asUnpooled from EQL, use ref counting in the one spot where the logic forks. 
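For illustration, a self-contained sketch of the ref-counting discipline this applies at the fork (the `PooledHits` class below is a made-up stand-in for Elasticsearch's ref-counted `SearchHits`, not the real API):

```java
import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical pooled object with an incRef()/decRef() contract: the last
// decRef() is what returns the underlying buffers to the pool.
class PooledHits {
    private final AtomicInteger refs = new AtomicInteger(1);

    void incRef() {
        refs.incrementAndGet();
    }

    void decRef() {
        if (refs.decrementAndGet() == 0) {
            System.out.println("hits released back to the pool");
        }
    }
}

class RefCountDemo {
    public static void main(String[] args) {
        PooledHits hits = new PooledHits();
        hits.incRef(); // pin the hits before forking work onto another path
        Runnable forked = () -> {
            try {
                System.out.println("processing hits on the forked path");
            } finally {
                hits.decRef(); // the forked path releases its own reference
            }
        };
        forked.run();
        hits.decRef(); // release the original reference; refs hit 0 and the pool reclaims
    }
}
```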
--- .../xpack/eql/action/EqlSearchResponse.java | 1 + .../eql/execution/sample/SampleIterator.java | 8 ++--- .../eql/execution/search/RuntimeUtils.java | 5 --- .../execution/sequence/TumblingWindow.java | 33 ++++++++++++------- 4 files changed, 26 insertions(+), 21 deletions(-) diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java index 5eef57cbb6c5b..be1d4c0871ca7 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java @@ -277,6 +277,7 @@ public Event(String index, String id, BytesReference source, Map listener) { for (int responseIndex = 0; responseIndex < response.length; responseIndex++) { MultiSearchResponse.Item item = response[responseIndex]; - final var hits = RuntimeUtils.searchHits(item.getResponse()); - if (hits.size() > 0) { - sample.add(hits); + final var hits = item.getResponse().getHits(); + if (hits.getHits().length > 0) { + sample.add(Arrays.asList(hits.getHits())); } if (docGroupsCounter == maxCriteria) { List> matches = matchSamples(sample, maxCriteria, maxSamplesPerKey); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java index a2309c48578a3..40f7f7139efa1 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java @@ -181,11 +181,6 @@ public static SearchRequest prepareRequest(SearchSourceBuilder source, boolean i return searchRequest; } - public static List searchHits(SearchResponse response) { - // TODO remove unpooled usage - return Arrays.asList(response.getHits().asUnpooled().getHits()); - } - /** * optimized method that adds filter to existing bool queries without additional wrapping * additionally checks whether the given query exists for safe decoration diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java index 35f171806ccb2..eabf6df518ad4 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/TumblingWindow.java @@ -19,6 +19,7 @@ import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.eql.execution.assembler.BoxedQueryRequest; @@ -38,6 +39,7 @@ import org.elasticsearch.xpack.ql.util.CollectionUtils; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; @@ -49,7 +51,6 @@ import static org.elasticsearch.action.ActionListener.runAfter; import static org.elasticsearch.xpack.eql.execution.ExecutionUtils.copySource; import static org.elasticsearch.xpack.eql.execution.search.RuntimeUtils.combineFilters; -import static 
org.elasticsearch.xpack.eql.execution.search.RuntimeUtils.searchHits; import static org.elasticsearch.xpack.eql.util.SearchHitUtils.qualifiedIndex; /** @@ -361,18 +362,19 @@ private void advance(int stage, ActionListener listener) { */ private void baseCriterion(int baseStage, SearchResponse r, ActionListener listener) { SequenceCriterion base = criteria.get(baseStage); - List hits = searchHits(r); + SearchHits hits = r.getHits(); - log.trace("Found [{}] hits", hits.size()); + log.trace("Found [{}] hits", hits.getHits().length); Ordinal begin = null, end = null; WindowInfo info; // if there is at least one result, process it - if (hits.isEmpty() == false) { + if (hits.getHits().length > 0) { // get borders for the rest of the queries - but only when at least one result is found - begin = headOrdinal(hits, base); - end = tailOrdinal(hits, base); + var hitsAsList = Arrays.asList(hits.getHits()); + begin = headOrdinal(hitsAsList, base); + end = tailOrdinal(hitsAsList, base); // always create an ASC window info = new WindowInfo(baseStage, begin, end); @@ -391,7 +393,14 @@ private void baseCriterion(int baseStage, SearchResponse r, ActionListener 0) { // find "until" ordinals - early on to discard data in-flight to avoid matching // hits that can occur in other documents - untilCriterion(info, listener, () -> completeBaseCriterion(baseStage, hits, info, listener)); + hits.incRef(); + untilCriterion(info, listener, () -> { + try { + completeBaseCriterion(baseStage, hits, info, listener); + } finally { + hits.decRef(); + } + }); return; } } else { @@ -405,17 +414,17 @@ private void baseCriterion(int baseStage, SearchResponse r, ActionListener hits, WindowInfo info, ActionListener listener) { + private void completeBaseCriterion(int baseStage, SearchHits hits, WindowInfo info, ActionListener listener) { SequenceCriterion base = criteria.get(baseStage); // check for matches - if the limit has been reached, abort - if (matcher.match(baseStage, wrapValues(base, hits)) == false) { + if (matcher.match(baseStage, wrapValues(base, Arrays.asList(hits.getHits()))) == false) { payload(listener); return; } int nextStage = nextPositiveStage(baseStage); - boolean windowCompleted = hits.size() < windowSize; + boolean windowCompleted = hits.getHits().length < windowSize; // there are still queries if (nextStage > 0) { // -1 means no further positive stages @@ -527,7 +536,7 @@ private void untilCriterion(WindowInfo window, ActionListener listener, log.trace("Querying until stage {}", request); client.query(request, listener.delegateFailureAndWrap((delegate, r) -> { - List hits = searchHits(r); + List hits = Arrays.asList(r.getHits().getHits()); log.trace("Found [{}] hits", hits.size()); // no more results for until - let the other queries run @@ -558,7 +567,7 @@ private void secondaryCriterion(WindowInfo window, int currentStage, ActionListe log.trace("Querying (secondary) stage [{}] {}", criterion.stage(), request); client.query(request, listener.delegateFailureAndWrap((delegate, r) -> { - List hits = searchHits(r); + List hits = Arrays.asList(r.getHits().getHits()); // filter hits that are escaping the window (same timestamp but different tiebreaker) // apply it only to ASC queries; DESC queries need it to find matches going the opposite direction From 5b2cdac76156c0691c8f42d156c3fbded6370ef1 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 6 Feb 2024 15:22:17 +0100 Subject: [PATCH 063/106] Some cleanup of SearchTransportService (#105185) Cleanup some duplication (mostly of lambdas), remove getter calls 
and allocation from hot transport loop and drop some dead code. --- .../action/search/ExpandSearchPhase.java | 2 +- .../action/search/SearchResponseSections.java | 19 ---- .../search/SearchScrollAsyncAction.java | 2 +- .../action/search/SearchTransportService.java | 93 ++++++++----------- .../action/search/TransportSearchAction.java | 4 +- 5 files changed, 41 insertions(+), 79 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index 7741c1483f69a..149cdb9206b34 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -121,7 +121,7 @@ private static SearchSourceBuilder buildExpandSearchSourceBuilder(InnerHitBuilde } } if (options.getFetchFields() != null) { - options.getFetchFields().forEach(ff -> groupSource.fetchField(ff)); + options.getFetchFields().forEach(groupSource::fetchField); } if (options.getDocValueFields() != null) { options.getDocValueFields().forEach(ff -> groupSource.docValueField(ff.field, ff.format)); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java index a3763bf101b15..b2ccdc610a4c2 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java @@ -79,33 +79,14 @@ protected void closeInternal() { }) : ALWAYS_REFERENCED; } - public final boolean timedOut() { - return this.timedOut; - } - - public final Boolean terminatedEarly() { - return this.terminatedEarly; - } - public final SearchHits hits() { return hits; } - public final InternalAggregations aggregations() { - return aggregations; - } - public final Suggest suggest() { return suggest; } - /** - * Returns the number of reduce phases applied to obtain this search response - */ - public final int getNumReducePhases() { - return numReducePhases; - } - /** * Returns the profile results for this search response (including all shards). 
* An empty map is returned if profiling was not enabled diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java index 0616a99fc5dd0..62b39dd675387 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java @@ -203,7 +203,7 @@ synchronized ShardSearchFailure[] buildShardFailures() { // pkg private for test if (shardFailures.isEmpty()) { return ShardSearchFailure.EMPTY_ARRAY; } - return shardFailures.toArray(new ShardSearchFailure[shardFailures.size()]); + return shardFailures.toArray(ShardSearchFailure.EMPTY_ARRAY); } // we do our best to return the shard failures, but its ok if its not fully concurrently safe diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 9fb0c87c78eb7..d0ae8d1ccb3f1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -194,7 +194,7 @@ public void sendExecuteDfs( DFS_ACTION_NAME, request, task, - new ConnectionCountingHandler<>(listener, DfsSearchResult::new, clientConnections, connection.getNode().getId()) + new ConnectionCountingHandler<>(listener, DfsSearchResult::new, connection) ); } @@ -216,7 +216,7 @@ public void sendExecuteQuery( QUERY_ACTION_NAME, request, task, - new ConnectionCountingHandler<>(handler, reader, clientConnections, connection.getNode().getId()) + new ConnectionCountingHandler<>(handler, reader, connection) ); } @@ -231,7 +231,7 @@ public void sendExecuteQuery( QUERY_ID_ACTION_NAME, request, task, - new ConnectionCountingHandler<>(listener, QuerySearchResult::new, clientConnections, connection.getNode().getId()) + new ConnectionCountingHandler<>(listener, QuerySearchResult::new, connection) ); } @@ -246,7 +246,7 @@ public void sendExecuteScrollQuery( QUERY_SCROLL_ACTION_NAME, request, task, - new ConnectionCountingHandler<>(listener, ScrollQuerySearchResult::new, clientConnections, connection.getNode().getId()) + new ConnectionCountingHandler<>(listener, ScrollQuerySearchResult::new, connection) ); } @@ -261,7 +261,7 @@ public void sendExecuteScrollFetch( QUERY_FETCH_SCROLL_ACTION_NAME, request, task, - new ConnectionCountingHandler<>(listener, ScrollQueryFetchSearchResult::new, clientConnections, connection.getNode().getId()) + new ConnectionCountingHandler<>(listener, ScrollQueryFetchSearchResult::new, connection) ); } @@ -295,7 +295,7 @@ private void sendExecuteFetch( action, request, task, - new ConnectionCountingHandler<>(listener, FetchSearchResult::new, clientConnections, connection.getNode().getId()) + new ConnectionCountingHandler<>(listener, FetchSearchResult::new, connection) ); } @@ -309,7 +309,7 @@ void sendExecuteMultiSearch(final MultiSearchRequest request, SearchTask task, f TransportMultiSearchAction.TYPE.name(), request, task, - new ConnectionCountingHandler<>(listener, MultiSearchResponse::new, clientConnections, connection.getNode().getId()) + new ConnectionCountingHandler<>(listener, MultiSearchResponse::new, connection) ); } @@ -413,14 +413,15 @@ public static void registerRequestHandler( SearchService searchService, SearchTransportAPMMetrics searchTransportMetrics ) { + final TransportRequestHandler freeContextHandler = 
(request, channel, task) -> { + boolean freed = searchService.freeReaderContext(request.id()); + channel.sendResponse(new SearchFreeContextResponse(freed)); + }; transportService.registerRequestHandler( FREE_CONTEXT_SCROLL_ACTION_NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, ScrollFreeContextRequest::new, - instrumentedHandler(FREE_CONTEXT_SCROLL_ACTION_METRIC, transportService, searchTransportMetrics, (request, channel, task) -> { - boolean freed = searchService.freeReaderContext(request.id()); - channel.sendResponse(new SearchFreeContextResponse(freed)); - }) + instrumentedHandler(FREE_CONTEXT_SCROLL_ACTION_METRIC, transportService, searchTransportMetrics, freeContextHandler) ); TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_SCROLL_ACTION_NAME, false, SearchFreeContextResponse::new); @@ -428,10 +429,7 @@ public static void registerRequestHandler( FREE_CONTEXT_ACTION_NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, SearchFreeContextRequest::new, - instrumentedHandler(FREE_CONTEXT_ACTION_METRIC, transportService, searchTransportMetrics, (request, channel, task) -> { - boolean freed = searchService.freeReaderContext(request.id()); - channel.sendResponse(new SearchFreeContextResponse(freed)); - }) + instrumentedHandler(FREE_CONTEXT_ACTION_METRIC, transportService, searchTransportMetrics, freeContextHandler) ); TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_ACTION_NAME, false, SearchFreeContextResponse::new); @@ -541,20 +539,13 @@ public static void registerRequestHandler( ); TransportActionProxy.registerProxyAction(transportService, QUERY_FETCH_SCROLL_ACTION_NAME, true, ScrollQueryFetchSearchResult::new); + final TransportRequestHandler shardFetchRequestHandler = (request, channel, task) -> searchService + .executeFetchPhase(request, (SearchShardTask) task, new ChannelActionListener<>(channel)); transportService.registerRequestHandler( FETCH_ID_SCROLL_ACTION_NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, ShardFetchRequest::new, - instrumentedHandler( - FETCH_ID_SCROLL_ACTION_METRIC, - transportService, - searchTransportMetrics, - (request, channel, task) -> searchService.executeFetchPhase( - request, - (SearchShardTask) task, - new ChannelActionListener<>(channel) - ) - ) + instrumentedHandler(FETCH_ID_SCROLL_ACTION_METRIC, transportService, searchTransportMetrics, shardFetchRequestHandler) ); TransportActionProxy.registerProxyAction(transportService, FETCH_ID_SCROLL_ACTION_NAME, true, FetchSearchResult::new); @@ -564,16 +555,7 @@ public static void registerRequestHandler( true, true, ShardFetchSearchRequest::new, - instrumentedHandler( - FETCH_ID_ACTION_METRIC, - transportService, - searchTransportMetrics, - (request, channel, task) -> searchService.executeFetchPhase( - request, - (SearchShardTask) task, - new ChannelActionListener<>(channel) - ) - ) + instrumentedHandler(FETCH_ID_ACTION_METRIC, transportService, searchTransportMetrics, shardFetchRequestHandler) ); TransportActionProxy.registerProxyAction(transportService, FETCH_ID_ACTION_NAME, true, FetchSearchResult::new); @@ -597,13 +579,16 @@ private static TransportRequestHandler transportRequestHandler ) { + var threadPool = transportService.getThreadPool(); + var latencies = searchTransportMetrics.getActionLatencies(); + Map attributes = Map.of(ACTION_ATTRIBUTE_NAME, actionQualifier); return (request, channel, task) -> { - var startTime = transportService.getThreadPool().relativeTimeInMillis(); + var startTime = threadPool.relativeTimeInMillis(); try { 
transportRequestHandler.messageReceived(request, channel, task); } finally { - var elapsedTime = transportService.getThreadPool().relativeTimeInMillis() - startTime; - searchTransportMetrics.getActionLatencies().record(elapsedTime, Map.of(ACTION_ATTRIBUTE_NAME, actionQualifier)); + var elapsedTime = threadPool.relativeTimeInMillis() - startTime; + latencies.record(elapsedTime, attributes); } }; } @@ -624,19 +609,16 @@ public Transport.Connection getConnection(@Nullable String clusterAlias, Discove } } - static final class ConnectionCountingHandler extends ActionListenerResponseHandler { - private final Map clientConnections; + private final class ConnectionCountingHandler extends ActionListenerResponseHandler { private final String nodeId; ConnectionCountingHandler( final ActionListener listener, final Writeable.Reader responseReader, - final Map clientConnections, - final String nodeId + final Transport.Connection connection ) { super(listener, responseReader, TransportResponseHandler.TRANSPORT_WORKER); - this.clientConnections = clientConnections; - this.nodeId = nodeId; + this.nodeId = connection.getNode().getId(); // Increment the number of connections for this node by one clientConnections.compute(nodeId, (id, conns) -> conns == null ? 1 : conns + 1); } @@ -644,27 +626,26 @@ static final class ConnectionCountingHandler @Override public void handleResponse(Response response) { super.handleResponse(response); - // Decrement the number of connections or remove it entirely if there are no more connections - // We need to remove the entry here so we don't leak when nodes go away forever - assert assertNodePresent(); - clientConnections.computeIfPresent(nodeId, (id, conns) -> conns.longValue() == 1 ? null : conns - 1); + decConnectionCount(); } @Override public void handleException(TransportException e) { super.handleException(e); - // Decrement the number of connections or remove it entirely if there are no more connections - // We need to remove the entry here so we don't leak when nodes go away forever + decConnectionCount(); + } + + // Decrement the number of connections or remove it entirely if there are no more connections + // We need to remove the entry here so we don't leak when nodes go away forever + private void decConnectionCount() { assert assertNodePresent(); - clientConnections.computeIfPresent(nodeId, (id, conns) -> conns.longValue() == 1 ? null : conns - 1); + clientConnections.computeIfPresent(nodeId, (id, conns) -> conns == 1 ? 
null : conns - 1); } private boolean assertNodePresent() { - clientConnections.compute(nodeId, (id, conns) -> { - assert conns != null : "number of connections for " + id + " is null, but should be an integer"; - assert conns >= 1 : "number of connections for " + id + " should be >= 1 but was " + conns; - return conns; - }); + var conns = clientConnections.get(nodeId); + assert conns != null : "number of connections for " + nodeId + " is null, but should be an integer"; + assert conns >= 1 : "number of connections for " + nodeId + " should be >= 1 but was " + conns; // Always return true, there is additional asserting here, the boolean is just so this // can be skipped when assertions are not enabled return true; diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 53ed6853fc08c..d80322b2954c6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -360,7 +360,7 @@ void executeRequest( localIndices, remoteClusterIndices, true, - alias -> remoteClusterService.isSkipUnavailable(alias) + remoteClusterService::isSkipUnavailable ); if (localIndices == null) { // Notify the progress listener that a CCS with minimize_roundtrips is happening remote-only (no local shards) @@ -395,7 +395,7 @@ void executeRequest( localIndices, remoteClusterIndices, false, - alias -> remoteClusterService.isSkipUnavailable(alias) + remoteClusterService::isSkipUnavailable ); // TODO: pass parentTaskId collectSearchShards( From 290c16379d0baa5c2964ae3792337fcedc6b00d8 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Tue, 6 Feb 2024 14:32:32 +0000 Subject: [PATCH 064/106] [ML] Make _inference APIs available in serverless (#105190) Allow the `_inference` APIs to be called from outside the Elasticsearch cluster in serverless. The client annotations still need updating so that the clients know the APIs exist. But that cannot happen until the APIs themselves are visible, so this first step makes that happen. 
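To make "callable from outside the cluster" concrete, here is a sketch of an external client invoking one of these endpoints. The host, API key, task type, and inference id are invented, and the URL shape is an assumption about the `_inference` API rather than something shown in the diff below:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

class InferenceCallDemo {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        // POST against a hypothetical serverless project endpoint; before this
        // change the request would be rejected because the API was internal-only.
        HttpRequest request = HttpRequest.newBuilder()
            .uri(URI.create("https://my-project.es.example.com/_inference/sparse_embedding/my-model"))
            .header("Authorization", "ApiKey REDACTED")
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString("{\"input\": \"some text to embed\"}"))
            .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}
```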
--- .../xpack/inference/rest/RestDeleteInferenceModelAction.java | 3 +++ .../xpack/inference/rest/RestGetInferenceModelAction.java | 3 +++ .../xpack/inference/rest/RestInferenceAction.java | 3 +++ .../xpack/inference/rest/RestPutInferenceModelAction.java | 3 +++ 4 files changed, 12 insertions(+) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java index 603b5ef41da73..9efecf1d28024 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.inference.action.DeleteInferenceModelAction; @@ -17,6 +19,7 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; +@ServerlessScope(Scope.PUBLIC) public class RestDeleteInferenceModelAction extends BaseRestHandler { @Override public String getName() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java index 3dfa713cdecc7..310c9b6dd6f95 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java @@ -11,6 +11,8 @@ import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.inference.action.GetInferenceModelAction; @@ -18,6 +20,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; +@ServerlessScope(Scope.PUBLIC) public class RestGetInferenceModelAction extends BaseRestHandler { @Override public String getName() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java index ba7e6b363f003..ff1931438dda9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.inference.action.InferenceAction; @@ -18,6 +20,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; +@ServerlessScope(Scope.PUBLIC) public class RestInferenceAction extends 
BaseRestHandler { @Override public String getName() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java index 7c9bf400e998b..9a1f15f08a89a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java @@ -10,6 +10,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.Scope; +import org.elasticsearch.rest.ServerlessScope; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; @@ -18,6 +20,7 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; +@ServerlessScope(Scope.PUBLIC) public class RestPutInferenceModelAction extends BaseRestHandler { @Override public String getName() { From b99bccf98b83b1bc50f803395c2a1a652496ed4e Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 6 Feb 2024 15:51:55 +0100 Subject: [PATCH 065/106] Rename QueryPhaseTimeoutException to SearchTimeoutException (#105182) The newly introduced QueryPhaseTimeoutException extends from QueryPhaseExecutionException. It can instead directly subclass SearchException. That allows us to enforce that QueryPhaseExecutionException always has a root cause provided, which ensures that its status code never defaults to 500. There are no serialization changes involved in this change, as QueryPhaseExecutionException exposes the exact same instance members as SearchException, which get serialized over the wire. This change does rename the transport version constant introduced in #10468, but that is safe as the version id is left untouched. It is a cosmetic change to ensure consistency between the constant name and the now renamed exception. 
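Taken together, the hunks that follow amount to the contract sketched below; this snippet is for illustration only, with the class wrapper and the `target` and `cause` variables assumed rather than taken from the patch.

import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.query.QueryPhaseExecutionException;
import org.elasticsearch.search.query.SearchTimeoutException;

class TimeoutContractSketch {
    static void illustrate(SearchShardTarget target, Exception cause) {
        // The root cause is now mandatory: passing null fails fast with a NullPointerException,
        // which (per the rationale above) keeps the status code from defaulting to 500.
        QueryPhaseExecutionException failure = new QueryPhaseExecutionException(target, "boom", cause);

        // Timeouts no longer flow through QueryPhaseExecutionException at all: the renamed
        // SearchTimeoutException subclasses SearchException directly and maps to HTTP 504.
        SearchTimeoutException timeout = new SearchTimeoutException(target, "Time exceeded");
    }
}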
--- .../java/org/elasticsearch/ElasticsearchException.java | 10 +++++----- .../main/java/org/elasticsearch/TransportVersions.java | 2 +- .../org/elasticsearch/search/query/QueryPhase.java | 2 +- .../search/query/QueryPhaseExecutionException.java | 9 +++------ ...meoutException.java => SearchTimeoutException.java} | 9 +++++---- .../org/elasticsearch/ExceptionSerializationTests.java | 4 ++-- 6 files changed, 17 insertions(+), 19 deletions(-) rename server/src/main/java/org/elasticsearch/search/query/{QueryPhaseTimeoutException.java => SearchTimeoutException.java} (69%) diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 656d213e7a1fd..10a97930b85e5 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -37,7 +37,7 @@ import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.UnsupportedAggregationOnDownsampledIndex; -import org.elasticsearch.search.query.QueryPhaseTimeoutException; +import org.elasticsearch.search.query.SearchTimeoutException; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentFragment; @@ -1898,11 +1898,11 @@ private enum ElasticsearchExceptionHandle { 175, TransportVersions.MISSED_INDICES_UPDATE_EXCEPTION_ADDED ), - QUERY_PHASE_TIMEOUT_EXCEPTION( - QueryPhaseTimeoutException.class, - QueryPhaseTimeoutException::new, + SEARCH_TIMEOUT_EXCEPTION( + SearchTimeoutException.class, + SearchTimeoutException::new, 176, - TransportVersions.QUERY_PHASE_TIMEOUT_EXCEPTION_ADDED + TransportVersions.SEARCH_TIMEOUT_EXCEPTION_ADDED ); final Class exceptionClass; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index cd7f9eb756b91..86d7cf9c718df 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -170,7 +170,7 @@ static TransportVersion def(int id) { public static final TransportVersion KNN_QUERY_NUMCANDS_AS_OPTIONAL_PARAM = def(8_583_00_0); public static final TransportVersion TRANSFORM_GET_BASIC_STATS = def(8_584_00_0); public static final TransportVersion NLP_DOCUMENT_CHUNKING_ADDED = def(8_585_00_0); - public static final TransportVersion QUERY_PHASE_TIMEOUT_EXCEPTION_ADDED = def(8_586_00_0); + public static final TransportVersion SEARCH_TIMEOUT_EXCEPTION_ADDED = def(8_586_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 2368eeb18b021..29cf80b75a22a 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -210,7 +210,7 @@ static void addCollectorsAndSearch(SearchContext searchContext) throws QueryPhas if (searcher.timeExceeded()) { assert timeoutRunnable != null : "TimeExceededException thrown even though timeout wasn't set"; if (searchContext.request().allowPartialSearchResults() == false) { - throw new QueryPhaseTimeoutException(searchContext.shardTarget(), "Time exceeded"); + throw new SearchTimeoutException(searchContext.shardTarget(), "Time exceeded"); } queryResult.searchTimedOut(true); } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseExecutionException.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseExecutionException.java index 73ec561a7ee26..106f8c82629eb 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseExecutionException.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseExecutionException.java @@ -13,18 +13,15 @@ import org.elasticsearch.search.SearchShardTarget; import java.io.IOException; +import java.util.Objects; -public class QueryPhaseExecutionException extends SearchException { +public final class QueryPhaseExecutionException extends SearchException { public QueryPhaseExecutionException(SearchShardTarget shardTarget, String msg, Throwable cause) { - super(shardTarget, "Query Failed [" + msg + "]", cause); + super(shardTarget, "Query Failed [" + msg + "]", Objects.requireNonNull(cause, "cause cannot be null")); } public QueryPhaseExecutionException(StreamInput in) throws IOException { super(in); } - - public QueryPhaseExecutionException(SearchShardTarget shardTarget, String msg) { - super(shardTarget, msg); - } } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseTimeoutException.java b/server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java similarity index 69% rename from server/src/main/java/org/elasticsearch/search/query/QueryPhaseTimeoutException.java rename to server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java index 1b41f31ea1c82..37a3b9d6b8787 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseTimeoutException.java +++ b/server/src/main/java/org/elasticsearch/search/query/SearchTimeoutException.java @@ -10,20 +10,21 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchException; import org.elasticsearch.search.SearchShardTarget; import java.io.IOException; /** - * Specific instance of QueryPhaseExecutionException that indicates that a search timeout occurred. + * Specific instance of {@link SearchException} that indicates that a search timeout occurred. 
* Always returns http status 504 (Gateway Timeout) */ -public class QueryPhaseTimeoutException extends QueryPhaseExecutionException { - public QueryPhaseTimeoutException(SearchShardTarget shardTarget, String msg) { +public class SearchTimeoutException extends SearchException { + public SearchTimeoutException(SearchShardTarget shardTarget, String msg) { super(shardTarget, msg); } - public QueryPhaseTimeoutException(StreamInput in) throws IOException { + public SearchTimeoutException(StreamInput in) throws IOException { super(in); } diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 9d5c47fbccbc6..134480eb839d3 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -80,7 +80,7 @@ import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.UnsupportedAggregationOnDownsampledIndex; import org.elasticsearch.search.internal.ShardSearchContextId; -import org.elasticsearch.search.query.QueryPhaseTimeoutException; +import org.elasticsearch.search.query.SearchTimeoutException; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotException; import org.elasticsearch.snapshots.SnapshotId; @@ -828,7 +828,7 @@ public void testIds() { ids.put(173, TooManyScrollContextsException.class); ids.put(174, AggregationExecutionException.InvalidPath.class); ids.put(175, AutoscalingMissedIndicesUpdateException.class); - ids.put(176, QueryPhaseTimeoutException.class); + ids.put(176, SearchTimeoutException.class); Map, Integer> reverse = new HashMap<>(); for (Map.Entry> entry : ids.entrySet()) { From 8a3920ab8574bad4aa114c9d63cd8644694c8146 Mon Sep 17 00:00:00 2001 From: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Date: Tue, 6 Feb 2024 15:55:41 +0100 Subject: [PATCH 066/106] [DOCS] Update `KEEP` command with duplicate precedence rules (#105146) --- .../esql/processing-commands/keep.asciidoc | 60 ++++++++++++++++++- .../src/main/resources/docs.csv-spec | 40 +++++++++++++ 2 files changed, 99 insertions(+), 1 deletion(-) diff --git a/docs/reference/esql/processing-commands/keep.asciidoc b/docs/reference/esql/processing-commands/keep.asciidoc index 7515583b1bfd1..57f32a68aec4c 100644 --- a/docs/reference/esql/processing-commands/keep.asciidoc +++ b/docs/reference/esql/processing-commands/keep.asciidoc @@ -10,6 +10,7 @@ KEEP columns ---- *Parameters* + `columns`:: A comma-separated list of columns to keep. Supports wildcards. @@ -18,6 +19,17 @@ A comma-separated list of columns to keep. Supports wildcards. The `KEEP` processing command enables you to specify what columns are returned and the order in which they are returned. +Precedence rules are applied when a field name matches multiple expressions. +Fields are added in the order they appear. If one field matches multiple expressions, the following precedence rules apply (from highest to lowest priority): + +1. Complete field name (no wildcards) +2. Partial wildcard expressions (for example: `fieldNam*`) +3. Wildcard only (`*`) + +If a field matches two expressions with the same precedence, the right-most expression wins. + +Refer to the examples for illustrations of these precedence rules. 
+ *Examples* The columns are returned in the specified order: @@ -38,12 +50,58 @@ columns with a name that matches a pattern: ---- include::{esql-specs}/docs.csv-spec[tag=keepWildcard] ---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=keep-wildcard-result] +|=== The asterisk wildcard (`*`) by itself translates to all columns that do not -match the other arguments. This query will first return all columns with a name +match the other arguments. + +This query will first return all columns with a name that starts with `h`, followed by all other columns: [source,esql] ---- include::{esql-specs}/docs.csv-spec[tag=keepDoubleWildcard] ---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=keep-double-wildcard-result] +|=== + +The following examples show how precedence rules work when a field name matches multiple expressions. + +Complete field name has precedence over wildcard expressions: + +[source,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=keepCompleteName] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=keep-complete-name-result] +|=== + +Wildcard expressions have the same priority, but last one wins (despite being less specific): + +[source,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=keepWildcardPrecedence] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=keep-wildcard-precedence-result] +|=== + +A simple wildcard expression `*` has the lowest precedence. +Output order is determined by the other arguments: + +[source,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=keepWildcardLowest] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=keep-wildcard-lowest-result] +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec index 3fe19942bdfaa..2c707a7a87c4f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec @@ -61,7 +61,9 @@ FROM employees // end::keepWildcard[] | LIMIT 0; +// tag::keep-wildcard-result[] height:double | height.float:double | height.half_float:double | height.scaled_float:double | hire_date:date +// end::keep-wildcard-result[] ; docsKeepDoubleWildcard @@ -71,7 +73,45 @@ FROM employees // end::keepDoubleWildcard[] | LIMIT 0; +// tag::keep-double-wildcard-result[] height:double | height.float:double | height.half_float:double | height.scaled_float:double | hire_date:date | avg_worked_seconds:long | birth_date:date | emp_no:integer | first_name:keyword | gender:keyword | is_rehired:boolean | job_positions:keyword | languages:integer | languages.byte:integer | languages.long:long | languages.short:integer | last_name:keyword | salary:integer | salary_change:double | salary_change.int:integer |salary_change.keyword:keyword |salary_change.long:long |still_hired:boolean +// end::keep-double-wildcard-result[] +; + +docsKeepCompleteName#[skip:-8.12.99, reason:duplicate precedence rules added in 8.13] +// tag::keepCompleteName[] +FROM employees +| KEEP first_name, last_name, first_name* +// end::keepCompleteName[] +| LIMIT 0; + +// tag::keep-complete-name-result[] +first_name:keyword | last_name:keyword +// end::keep-complete-name-result[] +; + +docsKeepWildcardPrecedence#[skip:-8.12.99, 
reason:duplicate precedence rules added in 8.13] +// tag::keepWildcardPrecedence[] +FROM employees +| KEEP first_name*, last_name, first_na* +// end::keepWildcardPrecedence[] +| LIMIT 0; + +// tag::keep-wildcard-precedence-result[] +last_name:keyword | first_name:keyword +// end::keep-wildcard-precedence-result[] +; + +docsKeepWildcardLowest#[skip:-8.12.99, reason:duplicate precedence rules added in 8.13] +// tag::keepWildcardLowest[] +FROM employees +| KEEP *, first_name +// end::keepWildcardLowest[] +| LIMIT 0; + +// tag::keep-wildcard-lowest-result[] +avg_worked_seconds:long|birth_date:date|emp_no:integer|gender:keyword|height:double|height.float:double|height.half_float:double|height.scaled_float:double|hire_date:date|is_rehired:boolean|job_positions:keyword|languages:integer|languages.byte:integer|languages.long:long|languages.short:integer|last_name:keyword|salary:integer|salary_change:double|salary_change.int:integer|salary_change.keyword:keyword|salary_change.long:long|still_hired:boolean|first_name:keyword +// end::keep-wildcard-lowest-result[] ; docsRename From d392cd7d56cc7db7447af8b90aa1a7a9dce6d0a1 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 6 Feb 2024 10:44:20 -0500 Subject: [PATCH 067/106] Tidy up collections code (#105085) --- .../ingest/geoip/AbstractGeoIpIT.java | 3 +-- .../ingest/geoip/GeoIpDownloaderIT.java | 19 ++++++------------- .../ingest/geoip/GeoIpDownloaderStatsIT.java | 3 +-- .../ingest/geoip/GeoIpDownloaderTaskIT.java | 4 ++-- .../geoip/GeoIpProcessorNonIngestNodeIT.java | 4 ++-- .../geoip/GeoIpDownloaderTaskExecutor.java | 2 +- .../ingest/geoip/GeoIpTaskState.java | 3 +-- .../geoip/DatabaseNodeServiceTests.java | 11 +++++------ .../ingest/geoip/GeoIpDownloaderTests.java | 15 +++++++-------- .../geoip/GeoIpProcessorFactoryTests.java | 11 +++++------ .../ingest/geoip/GeoIpProcessorTests.java | 19 +++++++++---------- ...erStatsActionResponseSerializingTests.java | 3 +-- .../IngestGeoIpClientYamlTestSuiteIT.java | 3 +-- 13 files changed, 42 insertions(+), 58 deletions(-) diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/AbstractGeoIpIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/AbstractGeoIpIT.java index 791aa5185ac30..ae811db226b06 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/AbstractGeoIpIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/AbstractGeoIpIT.java @@ -24,7 +24,6 @@ import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; -import java.util.Arrays; import java.util.Collection; import java.util.List; @@ -40,7 +39,7 @@ protected String getEndpoint() { @Override protected Collection> nodePlugins() { - return Arrays.asList(IngestGeoIpPlugin.class, IngestGeoIpSettingsPlugin.class); + return List.of(IngestGeoIpPlugin.class, IngestGeoIpSettingsPlugin.class); } @Override diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index 438b5f3f5efcd..54d465aecda52 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -52,7 +52,6 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import 
java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; @@ -87,7 +86,7 @@ public class GeoIpDownloaderIT extends AbstractGeoIpIT { @Override protected Collection> nodePlugins() { - return Arrays.asList( + return List.of( ReindexPlugin.class, IngestGeoIpPlugin.class, GeoIpProcessorNonIngestNodeIT.IngestGeoIpSettingsPlugin.class, @@ -131,10 +130,7 @@ public void cleanUp() throws Exception { for (GeoIpDownloaderStatsAction.NodeResponse nodeResponse : response.getNodes()) { assertThat(nodeResponse.getConfigDatabases(), empty()); assertThat(nodeResponse.getDatabases(), empty()); - assertThat( - nodeResponse.getFilesInTemp().stream().filter(s -> s.endsWith(".txt") == false).collect(Collectors.toList()), - empty() - ); + assertThat(nodeResponse.getFilesInTemp().stream().filter(s -> s.endsWith(".txt") == false).toList(), empty()); } }); assertBusy(() -> { @@ -386,7 +382,7 @@ public void testUseGeoIpProcessorWithDownloadedDBs() throws Exception { assertBusy(() -> { for (Path geoipTmpDir : geoipTmpDirs) { try (Stream list = Files.list(geoipTmpDir)) { - List files = list.map(Path::getFileName).map(Path::toString).collect(Collectors.toList()); + List files = list.map(Path::getFileName).map(Path::toString).toList(); assertThat( files, containsInAnyOrder( @@ -417,7 +413,7 @@ public void testUseGeoIpProcessorWithDownloadedDBs() throws Exception { assertBusy(() -> { for (Path geoipTmpDir : geoipTmpDirs) { try (Stream list = Files.list(geoipTmpDir)) { - List files = list.map(Path::toString).filter(p -> p.endsWith(".mmdb")).collect(Collectors.toList()); + List files = list.map(Path::toString).filter(p -> p.endsWith(".mmdb")).toList(); assertThat(files, empty()); } } @@ -681,7 +677,7 @@ private List getGeoIpTmpDirs() throws IOException { assertThat(Files.exists(geoipBaseTmpDir), is(true)); final List geoipTmpDirs; try (Stream files = Files.list(geoipBaseTmpDir)) { - geoipTmpDirs = files.filter(path -> ids.contains(path.getFileName().toString())).collect(Collectors.toList()); + geoipTmpDirs = files.filter(path -> ids.contains(path.getFileName().toString())).toList(); } assertThat(geoipTmpDirs.size(), equalTo(internalCluster().numDataNodes())); return geoipTmpDirs; @@ -718,10 +714,7 @@ private void setupDatabasesInConfigDirectory() throws Exception { containsInAnyOrder("GeoLite2-Country.mmdb", "GeoLite2-City.mmdb", "GeoLite2-ASN.mmdb") ); assertThat(nodeResponse.getDatabases(), empty()); - assertThat( - nodeResponse.getFilesInTemp().stream().filter(s -> s.endsWith(".txt") == false).collect(Collectors.toList()), - empty() - ); + assertThat(nodeResponse.getFilesInTemp().stream().filter(s -> s.endsWith(".txt") == false).toList(), empty()); } }); } diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java index 7b27c9d3246a0..77b0faeeb6ebd 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderStatsIT.java @@ -24,7 +24,6 @@ import org.junit.After; import java.io.IOException; -import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Map; @@ -43,7 +42,7 @@ public class GeoIpDownloaderStatsIT extends AbstractGeoIpIT { @Override protected Collection> nodePlugins() { - 
return Arrays.asList(ReindexPlugin.class, IngestGeoIpPlugin.class, GeoIpProcessorNonIngestNodeIT.IngestGeoIpSettingsPlugin.class); + return List.of(ReindexPlugin.class, IngestGeoIpPlugin.class, GeoIpProcessorNonIngestNodeIT.IngestGeoIpSettingsPlugin.class); } @Override diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskIT.java index ff7a727999862..473f1fb498e76 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskIT.java @@ -17,14 +17,14 @@ import org.elasticsearch.reindex.ReindexPlugin; import org.junit.After; -import java.util.Arrays; import java.util.Collection; +import java.util.List; public class GeoIpDownloaderTaskIT extends AbstractGeoIpIT { @Override protected Collection> nodePlugins() { - return Arrays.asList(ReindexPlugin.class, IngestGeoIpPlugin.class, IngestGeoIpSettingsPlugin.class); + return List.of(ReindexPlugin.class, IngestGeoIpPlugin.class, IngestGeoIpSettingsPlugin.class); } @Override diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java index 8490b17d535c6..f34f647a01e05 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpProcessorNonIngestNodeIT.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.util.Arrays; -import java.util.Collections; +import java.util.Map; import static org.elasticsearch.test.NodeRoles.nonIngestNode; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -100,7 +100,7 @@ public void testLazyLoading() throws IOException { assertDatabaseLoadStatus(ingestNode, false); final IndexRequest indexRequest = new IndexRequest("index"); indexRequest.setPipeline("geoip"); - indexRequest.source(Collections.singletonMap("ip", "1.1.1.1")); + indexRequest.source(Map.of("ip", "1.1.1.1")); final DocWriteResponse indexResponse = client(ingestNode).index(indexRequest).actionGet(); assertThat(indexResponse.status(), equalTo(RestStatus.CREATED)); // now the geo-IP database should be loaded on the ingest node diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index 1f170e0f796ff..322eb0666db07 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -268,7 +268,7 @@ private static List pipelineConfigurationsWithGeoIpProces return pipelineDefinitions.stream().filter(pipelineConfig -> { List> processors = (List>) pipelineConfig.getConfigAsMap().get(Pipeline.PROCESSORS_KEY); return hasAtLeastOneGeoipProcessor(processors, downloadDatabaseOnPipelineCreation); - }).collect(Collectors.toList()); + }).toList(); } /** diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java 
b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java index 589950116e0af..bd393fe9840d7 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpTaskState.java @@ -26,7 +26,6 @@ import java.io.IOException; import java.time.Instant; import java.time.temporal.ChronoUnit; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -41,7 +40,7 @@ class GeoIpTaskState implements PersistentTaskState, VersionedNamedWriteable { private static final ParseField DATABASES = new ParseField("databases"); - static final GeoIpTaskState EMPTY = new GeoIpTaskState(Collections.emptyMap()); + static final GeoIpTaskState EMPTY = new GeoIpTaskState(Map.of()); @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java index cbb41dfa02c5f..34d5429142cec 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java @@ -78,7 +78,6 @@ import java.util.UUID; import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.Consumer; -import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; import java.util.zip.GZIPInputStream; @@ -153,7 +152,7 @@ public void testCheckDatabases() throws Exception { ClusterState state = createClusterState(tasksCustomMetadata); int numPipelinesToBeReloaded = randomInt(4); - List pipelineIds = IntStream.range(0, numPipelinesToBeReloaded).mapToObj(String::valueOf).collect(Collectors.toList()); + List pipelineIds = IntStream.range(0, numPipelinesToBeReloaded).mapToObj(String::valueOf).toList(); when(ingestService.getPipelineWithProcessorType(any(), any())).thenReturn(pipelineIds); assertThat(databaseNodeService.getDatabase("GeoIP2-City.mmdb"), nullValue()); @@ -208,7 +207,7 @@ public void testCheckDatabases_dontCheckDatabaseOnNonIngestNode() throws Excepti assertThat(databaseNodeService.getDatabase("GeoIP2-City.mmdb"), nullValue()); verify(client, never()).search(any()); try (Stream files = Files.list(geoIpTmpDir.resolve("geoip-databases").resolve("nodeId"))) { - assertThat(files.collect(Collectors.toList()), empty()); + assertThat(files.toList(), empty()); } } @@ -228,7 +227,7 @@ public void testCheckDatabases_dontCheckDatabaseWhenNoDatabasesIndex() throws Ex assertThat(databaseNodeService.getDatabase("GeoIP2-City.mmdb"), nullValue()); verify(client, never()).search(any()); try (Stream files = Files.list(geoIpTmpDir.resolve("geoip-databases").resolve("nodeId"))) { - assertThat(files.collect(Collectors.toList()), empty()); + assertThat(files.toList(), empty()); } } @@ -243,7 +242,7 @@ public void testCheckDatabases_dontCheckDatabaseWhenGeoIpDownloadTask() throws E assertThat(databaseNodeService.getDatabase("GeoIP2-City.mmdb"), nullValue()); verify(client, never()).search(any()); try (Stream files = Files.list(geoIpTmpDir.resolve("geoip-databases").resolve("nodeId"))) { - assertThat(files.collect(Collectors.toList()), empty()); + assertThat(files.toList(), empty()); } } @@ -290,7 +289,7 @@ public void 
testRetrieveDatabaseCorruption() throws Exception { public void testUpdateDatabase() throws Exception { int numPipelinesToBeReloaded = randomInt(4); - List pipelineIds = IntStream.range(0, numPipelinesToBeReloaded).mapToObj(String::valueOf).collect(Collectors.toList()); + List pipelineIds = IntStream.range(0, numPipelinesToBeReloaded).mapToObj(String::valueOf).toList(); when(ingestService.getPipelineWithProcessorType(any(), any())).thenReturn(pipelineIds); databaseNodeService.updateDatabase("_name", "_md5", geoIpTmpDir.resolve("some-file")); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java index 915d54c91b259..d84e1aac303d9 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTests.java @@ -48,7 +48,6 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; -import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -105,7 +104,7 @@ public void setup() { "", "", EMPTY_TASK_ID, - Collections.emptyMap(), + Map.of(), () -> GeoIpDownloaderTaskExecutor.POLL_INTERVAL_SETTING.getDefault(Settings.EMPTY), () -> GeoIpDownloaderTaskExecutor.EAGER_DOWNLOAD_SETTING.getDefault(Settings.EMPTY), () -> true @@ -271,7 +270,7 @@ public void testProcessDatabaseNew() throws IOException { "", "", EMPTY_TASK_ID, - Collections.emptyMap(), + Map.of(), () -> GeoIpDownloaderTaskExecutor.POLL_INTERVAL_SETTING.getDefault(Settings.EMPTY), () -> GeoIpDownloaderTaskExecutor.EAGER_DOWNLOAD_SETTING.getDefault(Settings.EMPTY), () -> true @@ -320,7 +319,7 @@ public void testProcessDatabaseUpdate() throws IOException { "", "", EMPTY_TASK_ID, - Collections.emptyMap(), + Map.of(), () -> GeoIpDownloaderTaskExecutor.POLL_INTERVAL_SETTING.getDefault(Settings.EMPTY), () -> GeoIpDownloaderTaskExecutor.EAGER_DOWNLOAD_SETTING.getDefault(Settings.EMPTY), () -> true @@ -371,7 +370,7 @@ public void testProcessDatabaseSame() throws IOException { "", "", EMPTY_TASK_ID, - Collections.emptyMap(), + Map.of(), () -> GeoIpDownloaderTaskExecutor.POLL_INTERVAL_SETTING.getDefault(Settings.EMPTY), () -> GeoIpDownloaderTaskExecutor.EAGER_DOWNLOAD_SETTING.getDefault(Settings.EMPTY), () -> true @@ -415,7 +414,7 @@ public void testUpdateTaskState() { "", "", EMPTY_TASK_ID, - Collections.emptyMap(), + Map.of(), () -> GeoIpDownloaderTaskExecutor.POLL_INTERVAL_SETTING.getDefault(Settings.EMPTY), () -> GeoIpDownloaderTaskExecutor.EAGER_DOWNLOAD_SETTING.getDefault(Settings.EMPTY), () -> true @@ -445,7 +444,7 @@ public void testUpdateTaskStateError() { "", "", EMPTY_TASK_ID, - Collections.emptyMap(), + Map.of(), () -> GeoIpDownloaderTaskExecutor.POLL_INTERVAL_SETTING.getDefault(Settings.EMPTY), () -> GeoIpDownloaderTaskExecutor.EAGER_DOWNLOAD_SETTING.getDefault(Settings.EMPTY), () -> true @@ -486,7 +485,7 @@ public void testUpdateDatabases() throws IOException { "", "", EMPTY_TASK_ID, - Collections.emptyMap(), + Map.of(), () -> GeoIpDownloaderTaskExecutor.POLL_INTERVAL_SETTING.getDefault(Settings.EMPTY), () -> GeoIpDownloaderTaskExecutor.EAGER_DOWNLOAD_SETTING.getDefault(Settings.EMPTY), atLeastOneGeoipProcessor::get diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java 
b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 53a278ca7842b..dee9ba3189c26 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -37,7 +37,6 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; -import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.List; @@ -188,7 +187,7 @@ public void testBuildWithCountryDbAndAsnFields() throws Exception { EnumSet asnOnlyProperties = EnumSet.copyOf(GeoIpProcessor.Property.ALL_ASN_PROPERTIES); asnOnlyProperties.remove(GeoIpProcessor.Property.IP); String asnProperty = RandomPicks.randomFrom(Randomness.get(), asnOnlyProperties).toString(); - config.put("properties", Collections.singletonList(asnProperty)); + config.put("properties", List.of(asnProperty)); Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config)); assertThat( e.getMessage(), @@ -208,7 +207,7 @@ public void testBuildWithAsnDbAndCityFields() throws Exception { EnumSet cityOnlyProperties = EnumSet.copyOf(GeoIpProcessor.Property.ALL_CITY_PROPERTIES); cityOnlyProperties.remove(GeoIpProcessor.Property.IP); String cityProperty = RandomPicks.randomFrom(Randomness.get(), cityOnlyProperties).toString(); - config.put("properties", Collections.singletonList(cityProperty)); + config.put("properties", List.of(cityProperty)); Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config)); assertThat( e.getMessage(), @@ -271,7 +270,7 @@ public void testBuildIllegalFieldOption() throws Exception { Map config1 = new HashMap<>(); config1.put("field", "_field"); - config1.put("properties", Collections.singletonList("invalid")); + config1.put("properties", List.of("invalid")); Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, null, config1)); assertThat( e.getMessage(), @@ -314,7 +313,7 @@ public void testLazyLoading() throws Exception { assertNull(lazyLoader.databaseReader.get()); } - final Map field = Collections.singletonMap("_field", "1.1.1.1"); + final Map field = Map.of("_field", "1.1.1.1"); final IngestDocument document = new IngestDocument("index", "id", 1L, "routing", VersionType.EXTERNAL, field); Map config = new HashMap<>(); @@ -384,7 +383,7 @@ public void testLoadingCustomDatabase() throws IOException { assertNull(lazyLoader.databaseReader.get()); } - final Map field = Collections.singletonMap("_field", "1.1.1.1"); + final Map field = Map.of("_field", "1.1.1.1"); final IngestDocument document = new IngestDocument("index", "id", 1L, "routing", VersionType.EXTERNAL, field); Map config = new HashMap<>(); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 80c72238b9679..f5ad0e9c0817a 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -18,7 +18,6 @@ import java.io.IOException; import java.io.InputStream; -import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; @@ -105,7 +104,7 @@ public void 
testNonExistentWithIgnoreMissing() throws Exception { false, "filename" ); - IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Map.of()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); @@ -146,7 +145,7 @@ public void testNonExistentWithoutIgnoreMissing() throws Exception { false, "filename" ); - IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); + IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Map.of()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); assertThat(exception.getMessage(), equalTo("field [source_field] not present as part of path [source_field]")); @@ -361,7 +360,7 @@ public void testListAllValid() throws Exception { ); Map document = new HashMap<>(); - document.put("source_field", Arrays.asList("8.8.8.8", "82.171.64.0")); + document.put("source_field", List.of("8.8.8.8", "82.171.64.0")); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); @@ -391,7 +390,7 @@ public void testListPartiallyValid() throws Exception { ); Map document = new HashMap<>(); - document.put("source_field", Arrays.asList("8.8.8.8", "127.0.0.1")); + document.put("source_field", List.of("8.8.8.8", "127.0.0.1")); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); @@ -421,7 +420,7 @@ public void testListNoMatches() throws Exception { ); Map document = new HashMap<>(); - document.put("source_field", Arrays.asList("127.0.0.1", "127.0.0.1")); + document.put("source_field", List.of("127.0.0.1", "127.0.0.1")); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); @@ -437,7 +436,7 @@ public void testListDatabaseReferenceCounting() throws Exception { }, () -> true, "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, false, "filename"); Map document = new HashMap<>(); - document.put("source_field", Arrays.asList("8.8.8.8", "82.171.64.0")); + document.put("source_field", List.of("8.8.8.8", "82.171.64.0")); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); @@ -472,7 +471,7 @@ public void testListFirstOnly() throws Exception { ); Map document = new HashMap<>(); - document.put("source_field", Arrays.asList("8.8.8.8", "127.0.0.1")); + document.put("source_field", List.of("8.8.8.8", "127.0.0.1")); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); @@ -500,7 +499,7 @@ public void testListFirstOnlyNoMatches() throws Exception { ); Map document = new HashMap<>(); - document.put("source_field", Arrays.asList("127.0.0.1", "127.0.0.2")); + document.put("source_field", List.of("127.0.0.1", "127.0.0.2")); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); @@ -522,7 +521,7 @@ public void testInvalidDatabase() throws Exception { ); Map document = new 
HashMap<>(); - document.put("source_field", Arrays.asList("127.0.0.1", "127.0.0.2")); + document.put("source_field", List.of("127.0.0.1", "127.0.0.2")); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); processor.execute(ingestDocument); diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionResponseSerializingTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionResponseSerializingTests.java index b9fe1ee976d66..d566fa8838df1 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionResponseSerializingTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/stats/GeoIpDownloaderStatsActionResponseSerializingTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import java.util.Collections; import java.util.List; public class GeoIpDownloaderStatsActionResponseSerializingTests extends AbstractWireSerializingTestCase< @@ -29,7 +28,7 @@ protected GeoIpDownloaderStatsAction.Response createTestInstance() { 10, GeoIpDownloaderStatsActionNodeResponseSerializingTests::createRandomInstance ); - return new GeoIpDownloaderStatsAction.Response(ClusterName.DEFAULT, nodeResponses, Collections.emptyList()); + return new GeoIpDownloaderStatsAction.Response(ClusterName.DEFAULT, nodeResponses, List.of()); } @Override diff --git a/modules/ingest-geoip/src/yamlRestTest/java/org/elasticsearch/ingest/geoip/IngestGeoIpClientYamlTestSuiteIT.java b/modules/ingest-geoip/src/yamlRestTest/java/org/elasticsearch/ingest/geoip/IngestGeoIpClientYamlTestSuiteIT.java index 36b65119be6e7..58a6e3771b30d 100644 --- a/modules/ingest-geoip/src/yamlRestTest/java/org/elasticsearch/ingest/geoip/IngestGeoIpClientYamlTestSuiteIT.java +++ b/modules/ingest-geoip/src/yamlRestTest/java/org/elasticsearch/ingest/geoip/IngestGeoIpClientYamlTestSuiteIT.java @@ -31,7 +31,6 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.stream.Collectors; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; @@ -82,7 +81,7 @@ public void waitForDatabases() throws Exception { Map node = (Map) nodes.values().iterator().next(); List databases = ((List) node.get("databases")); assertThat(databases, notNullValue()); - List databaseNames = databases.stream().map(o -> (String) ((Map) o).get("name")).collect(Collectors.toList()); + List databaseNames = databases.stream().map(o -> (String) ((Map) o).get("name")).toList(); assertThat( databaseNames, containsInAnyOrder("GeoLite2-City.mmdb", "GeoLite2-Country.mmdb", "GeoLite2-ASN.mmdb", "MyCustomGeoLite2-City.mmdb") From 341f84583262cfc670b903f762f05f759162f37a Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 6 Feb 2024 10:44:48 -0500 Subject: [PATCH 068/106] Ingest geoip: tidy up logging code (#105086) --- .../ingest/geoip/ConfigDatabases.java | 11 ++-- .../ingest/geoip/DatabaseNodeService.java | 57 +++++++++---------- .../geoip/DatabaseReaderLazyLoader.java | 8 +-- .../ingest/geoip/GeoIpDownloader.java | 2 +- .../ingest/geoip/GeoIpProcessor.java | 4 +- 5 files changed, 40 insertions(+), 42 deletions(-) diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java index 
cc18d5a21389e..8711e014b840a 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/ConfigDatabases.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.env.Environment; import org.elasticsearch.watcher.FileChangesListener; import org.elasticsearch.watcher.FileWatcher; @@ -34,7 +33,7 @@ */ final class ConfigDatabases implements Closeable { - private static final Logger LOGGER = LogManager.getLogger(ConfigDatabases.class); + private static final Logger logger = LogManager.getLogger(ConfigDatabases.class); private final GeoIpCache cache; private final Path geoipConfigDir; @@ -58,7 +57,7 @@ void initialize(ResourceWatcherService resourceWatcher) throws IOException { watcher.addListener(new GeoipDirectoryListener()); resourceWatcher.add(watcher, ResourceWatcherService.Frequency.HIGH); - LOGGER.debug("initialized config databases [{}] and watching [{}] for changes", configDatabases.keySet(), geoipConfigDir); + logger.debug("initialized config databases [{}] and watching [{}] for changes", configDatabases.keySet(), geoipConfigDir); } DatabaseReaderLazyLoader getDatabase(String name) { @@ -73,20 +72,20 @@ void updateDatabase(Path file, boolean update) { String databaseFileName = file.getFileName().toString(); try { if (update) { - LOGGER.info("database file changed [{}], reload database...", file); + logger.info("database file changed [{}], reload database...", file); DatabaseReaderLazyLoader loader = new DatabaseReaderLazyLoader(cache, file, null); DatabaseReaderLazyLoader existing = configDatabases.put(databaseFileName, loader); if (existing != null) { existing.close(); } } else { - LOGGER.info("database file removed [{}], close database...", file); + logger.info("database file removed [{}], close database...", file); DatabaseReaderLazyLoader existing = configDatabases.remove(databaseFileName); assert existing != null; existing.close(); } } catch (Exception e) { - LOGGER.error((Supplier) () -> "failed to update database [" + databaseFileName + "]", e); + logger.error(() -> "failed to update database [" + databaseFileName + "]", e); } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java index ec17915f7d622..540f463be469c 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -87,7 +86,7 @@ */ public final class DatabaseNodeService implements GeoIpDatabaseProvider, Closeable { - private static final Logger LOGGER = LogManager.getLogger(DatabaseNodeService.class); + private static final Logger logger = LogManager.getLogger(DatabaseNodeService.class); private final Client client; private final GeoIpCache cache; @@ -145,10 +144,10 @@ public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) { @Override public FileVisitResult visitFile(Path file, 
BasicFileAttributes attrs) { try { - LOGGER.info("deleting stale file [{}]", file); + logger.info("deleting stale file [{}]", file); Files.deleteIfExists(file); } catch (IOException e) { - LOGGER.warn("can't delete stale file [" + file + "]", e); + logger.warn("can't delete stale file [" + file + "]", e); } return FileVisitResult.CONTINUE; } @@ -156,7 +155,7 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { @Override public FileVisitResult visitFileFailed(Path file, IOException e) { if (e instanceof NoSuchFileException == false) { - LOGGER.warn("can't delete stale file [" + file + "]", e); + logger.warn("can't delete stale file [" + file + "]", e); } return FileVisitResult.CONTINUE; } @@ -169,7 +168,7 @@ public FileVisitResult postVisitDirectory(Path dir, IOException exc) { if (Files.exists(geoipTmpDirectory) == false) { Files.createDirectories(geoipTmpDirectory); } - LOGGER.debug("initialized database node service, using geoip-databases directory [{}]", geoipTmpDirectory); + logger.debug("initialized database node service, using geoip-databases directory [{}]", geoipTmpDirectory); this.ingestService = ingestServiceArg; clusterService.addListener(event -> checkDatabases(event.state())); } @@ -246,26 +245,26 @@ void checkDatabases(ClusterState state) { DiscoveryNode localNode = state.nodes().getLocalNode(); if (localNode.isIngestNode() == false) { - LOGGER.trace("Not checking databases because local node is not ingest node"); + logger.trace("Not checking databases because local node is not ingest node"); return; } PersistentTasksCustomMetadata persistentTasks = state.metadata().custom(PersistentTasksCustomMetadata.TYPE); if (persistentTasks == null) { - LOGGER.trace("Not checking databases because persistent tasks are null"); + logger.trace("Not checking databases because persistent tasks are null"); return; } IndexAbstraction databasesAbstraction = state.getMetadata().getIndicesLookup().get(GeoIpDownloader.DATABASES_INDEX); if (databasesAbstraction == null) { - LOGGER.trace("Not checking databases because geoip databases index does not exist"); + logger.trace("Not checking databases because geoip databases index does not exist"); return; } else { // regardless of whether DATABASES_INDEX is an alias, resolve it to a concrete index Index databasesIndex = databasesAbstraction.getWriteIndex(); IndexRoutingTable databasesIndexRT = state.getRoutingTable().index(databasesIndex); if (databasesIndexRT == null || databasesIndexRT.allPrimaryShardsActive() == false) { - LOGGER.trace("Not checking databases because geoip databases index does not have all active primary shards"); + logger.trace("Not checking databases because geoip databases index does not have all active primary shards"); return; } } @@ -284,14 +283,14 @@ void checkDatabases(ClusterState state) { String remoteMd5 = metadata.md5(); String localMd5 = reference != null ? 
reference.getMd5() : null; if (Objects.equals(localMd5, remoteMd5)) { - LOGGER.debug("Current reference of [{}] is up to date [{}] with was recorded in CS [{}]", name, localMd5, remoteMd5); + logger.debug("Current reference of [{}] is up to date [{}] with was recorded in CS [{}]", name, localMd5, remoteMd5); return; } try { retrieveAndUpdateDatabase(name, metadata); } catch (Exception ex) { - LOGGER.error((Supplier) () -> "attempt to download database [" + name + "] failed", ex); + logger.error(() -> "attempt to download database [" + name + "] failed", ex); } }); @@ -308,7 +307,7 @@ void checkDatabases(ClusterState state) { } void retrieveAndUpdateDatabase(String databaseName, GeoIpTaskState.Metadata metadata) throws IOException { - LOGGER.trace("Retrieving database {}", databaseName); + logger.trace("Retrieving database {}", databaseName); final String recordedMd5 = metadata.md5(); // This acts as a lock, if this method for a specific db is executed later and downloaded for this db is still ongoing then @@ -318,7 +317,7 @@ void retrieveAndUpdateDatabase(String databaseName, GeoIpTaskState.Metadata meta try { databaseTmpGzFile = Files.createFile(geoipTmpDirectory.resolve(databaseName + ".tmp.gz")); } catch (FileAlreadyExistsException e) { - LOGGER.debug("database update [{}] already in progress, skipping...", databaseName); + logger.debug("database update [{}] already in progress, skipping...", databaseName); return; } @@ -330,20 +329,20 @@ void retrieveAndUpdateDatabase(String databaseName, GeoIpTaskState.Metadata meta // twice. This check is here to avoid this: DatabaseReaderLazyLoader lazyLoader = databases.get(databaseName); if (lazyLoader != null && recordedMd5.equals(lazyLoader.getMd5())) { - LOGGER.debug("deleting tmp file because database [{}] has already been updated.", databaseName); + logger.debug("deleting tmp file because database [{}] has already been updated.", databaseName); Files.delete(databaseTmpGzFile); return; } final Path databaseTmpFile = Files.createFile(geoipTmpDirectory.resolve(databaseName + ".tmp")); - LOGGER.debug("retrieve geoip database [{}] from [{}] to [{}]", databaseName, GeoIpDownloader.DATABASES_INDEX, databaseTmpGzFile); + logger.debug("retrieve geoip database [{}] from [{}] to [{}]", databaseName, GeoIpDownloader.DATABASES_INDEX, databaseTmpGzFile); retrieveDatabase( databaseName, recordedMd5, metadata, bytes -> Files.write(databaseTmpGzFile, bytes, StandardOpenOption.APPEND), () -> { - LOGGER.debug("decompressing [{}]", databaseTmpGzFile.getFileName()); + logger.debug("decompressing [{}]", databaseTmpGzFile.getFileName()); Path databaseFile = geoipTmpDirectory.resolve(databaseName); // tarball contains .mmdb, LICENSE.txt, COPYRIGHTS.txt and optional README.txt files. 
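The same two-part idiom recurs throughout this commit, condensed here into a standalone sketch (the class and method names are illustrative, not from the patch): the constant is renamed to the conventional lower-case `logger`, and the explicit `(Supplier)` cast disappears because a lambda resolves directly against Log4j 2's `error(Supplier<?>, Throwable)` overload.

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

class LoggingIdiomSketch {
    private static final Logger logger = LogManager.getLogger(LoggingIdiomSketch.class);

    void onDatabaseFailure(String databaseName, Exception e) {
        // Before: LOGGER.error((Supplier) () -> "failed to update database [" + databaseName + "]", e);
        // After: the lambda is the Supplier itself, so no cast is needed and the
        // message is still built lazily, only when the error level is enabled.
        logger.error(() -> "failed to update database [" + databaseName + "]", e);
    }
}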
@@ -369,19 +368,19 @@ void retrieveAndUpdateDatabase(String databaseName, GeoIpTaskState.Metadata meta } } - LOGGER.debug("moving database from [{}] to [{}]", databaseTmpFile, databaseFile); + logger.debug("moving database from [{}] to [{}]", databaseTmpFile, databaseFile); Files.move(databaseTmpFile, databaseFile, StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING); updateDatabase(databaseName, recordedMd5, databaseFile); Files.delete(databaseTmpGzFile); }, failure -> { - LOGGER.error((Supplier) () -> "failed to retrieve database [" + databaseName + "]", failure); + logger.error(() -> "failed to retrieve database [" + databaseName + "]", failure); try { Files.deleteIfExists(databaseTmpFile); Files.deleteIfExists(databaseTmpGzFile); } catch (IOException ioe) { ioe.addSuppressed(failure); - LOGGER.error("Unable to delete tmp database file after failure", ioe); + logger.error("Unable to delete tmp database file after failure", ioe); } } ); @@ -389,7 +388,7 @@ void retrieveAndUpdateDatabase(String databaseName, GeoIpTaskState.Metadata meta void updateDatabase(String databaseFileName, String recordedMd5, Path file) { try { - LOGGER.debug("starting reload of changed geoip database file [{}]", file); + logger.debug("starting reload of changed geoip database file [{}]", file); DatabaseReaderLazyLoader loader = new DatabaseReaderLazyLoader(cache, file, recordedMd5); DatabaseReaderLazyLoader existing = databases.put(databaseFileName, loader); if (existing != null) { @@ -399,41 +398,41 @@ void updateDatabase(String databaseFileName, String recordedMd5, Path file) { Predicate predicate = p -> databaseFileName.equals(p.getDatabaseName()); var ids = ingestService.getPipelineWithProcessorType(GeoIpProcessor.DatabaseUnavailableProcessor.class, predicate); if (ids.isEmpty() == false) { - LOGGER.debug("pipelines [{}] found to reload", ids); + logger.debug("pipelines [{}] found to reload", ids); for (var id : ids) { try { ingestService.reloadPipeline(id); - LOGGER.trace( + logger.trace( "successfully reloaded pipeline [{}] after downloading of database [{}] for the first time", id, databaseFileName ); } catch (Exception e) { - LOGGER.debug( + logger.debug( () -> format("failed to reload pipeline [%s] after downloading of database [%s]", id, databaseFileName), e ); } } } else { - LOGGER.debug("no pipelines found to reload"); + logger.debug("no pipelines found to reload"); } } - LOGGER.info("successfully loaded geoip database file [{}]", file.getFileName()); + logger.info("successfully loaded geoip database file [{}]", file.getFileName()); } catch (Exception e) { - LOGGER.error((Supplier) () -> "failed to update database [" + databaseFileName + "]", e); + logger.error(() -> "failed to update database [" + databaseFileName + "]", e); } } void removeStaleEntries(Collection staleEntries) { for (String staleEntry : staleEntries) { try { - LOGGER.debug("database [{}] no longer exists, cleaning up...", staleEntry); + logger.debug("database [{}] no longer exists, cleaning up...", staleEntry); DatabaseReaderLazyLoader existing = databases.remove(staleEntry); assert existing != null; existing.close(true); } catch (Exception e) { - LOGGER.error((Supplier) () -> "failed to clean database [" + staleEntry + "]", e); + logger.error(() -> "failed to clean database [" + staleEntry + "]", e); } } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java index 
3519113033b41..3a02c08360b1e 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseReaderLazyLoader.java @@ -45,7 +45,7 @@ class DatabaseReaderLazyLoader implements GeoIpDatabase, Closeable { private static final boolean LOAD_DATABASE_ON_HEAP = Booleans.parseBoolean(System.getProperty("es.geoip.load_db_on_heap", "false")); - private static final Logger LOGGER = LogManager.getLogger(DatabaseReaderLazyLoader.class); + private static final Logger logger = LogManager.getLogger(DatabaseReaderLazyLoader.class); private final String md5; private final GeoIpCache cache; @@ -203,7 +203,7 @@ DatabaseReader get() throws IOException { synchronized (databaseReader) { if (databaseReader.get() == null) { databaseReader.set(loader.get()); - LOGGER.debug("loaded [{}] geo-IP database", databasePath); + logger.debug("loaded [{}] geo-IP database", databasePath); } } } @@ -230,9 +230,9 @@ public void close() throws IOException { protected void doClose() throws IOException { IOUtils.close(databaseReader.get()); int numEntriesEvicted = cache.purgeCacheEntriesForDatabase(databasePath); - LOGGER.info("evicted [{}] entries from cache after reloading database [{}]", numEntriesEvicted, databasePath); + logger.info("evicted [{}] entries from cache after reloading database [{}]", numEntriesEvicted, databasePath); if (deleteDatabaseFileOnClose) { - LOGGER.info("deleting [{}]", databasePath); + logger.info("deleting [{}]", databasePath); Files.delete(databasePath); } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java index 3e04f7bfea2de..713e5111853a7 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java @@ -188,7 +188,7 @@ void processDatabase(Map databaseInfo) { } } catch (Exception e) { stats = stats.failedDownload(); - logger.error((org.apache.logging.log4j.util.Supplier) () -> "error downloading geoip database [" + name + "]", e); + logger.error(() -> "error downloading geoip database [" + name + "]", e); } } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 42dddf4c83ef3..6a5fb9007377b 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -48,7 +48,7 @@ public final class GeoIpProcessor extends AbstractProcessor { - private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(GeoIpProcessor.class); + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(GeoIpProcessor.class); static final String DEFAULT_DATABASES_DEPRECATION_MESSAGE = "the [fallback_to_default_databases] has been deprecated, because " + "Elasticsearch no longer includes the default Maxmind geoip databases. 
This setting will be removed in Elasticsearch 9.0"; @@ -429,7 +429,7 @@ public Processor create( // noop, should be removed in 9.0 Object value = config.remove("fallback_to_default_databases"); if (value != null) { - DEPRECATION_LOGGER.warn(DeprecationCategory.OTHER, "default_databases_message", DEFAULT_DATABASES_DEPRECATION_MESSAGE); + deprecationLogger.warn(DeprecationCategory.OTHER, "default_databases_message", DEFAULT_DATABASES_DEPRECATION_MESSAGE); } GeoIpDatabase geoIpDatabase = geoIpDatabaseProvider.getDatabase(databaseFile); From e3b6a657f43e8823b607ad299eae50e46c7a14f3 Mon Sep 17 00:00:00 2001 From: Carlos Delgado <6339205+carlosdelest@users.noreply.github.com> Date: Tue, 6 Feb 2024 16:56:46 +0100 Subject: [PATCH 069/106] Move semantic_text field mappers to inference plugin (#105187) --- .../inference/src/main/java/module-info.java | 1 + .../xpack/inference/InferencePlugin.java | 27 ++++++++++++++++++- .../xpack/inference}/SemanticTextFeature.java | 2 +- .../mapper/SemanticTextFieldMapper.java | 2 +- ...emanticTextInferenceResultFieldMapper.java | 2 +- .../SemanticTextClusterMetadataTests.java | 13 +++++++-- .../mapper/SemanticTextFieldMapperTests.java | 6 ++--- ...icTextInferenceResultFieldMapperTests.java | 8 +++--- .../xpack/ml/MachineLearning.java | 21 +-------------- .../xpack/ml/LocalStateMachineLearning.java | 6 ----- 10 files changed, 49 insertions(+), 39 deletions(-) rename x-pack/plugin/{ml/src/main/java/org/elasticsearch/xpack/ml => inference/src/main/java/org/elasticsearch/xpack/inference}/SemanticTextFeature.java (93%) rename x-pack/plugin/{ml/src/main/java/org/elasticsearch/xpack/ml => inference/src/main/java/org/elasticsearch/xpack/inference}/mapper/SemanticTextFieldMapper.java (98%) rename x-pack/plugin/{ml/src/main/java/org/elasticsearch/xpack/ml => inference/src/main/java/org/elasticsearch/xpack/inference}/mapper/SemanticTextInferenceResultFieldMapper.java (99%) rename x-pack/plugin/{ml => inference}/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java (87%) rename x-pack/plugin/{ml/src/test/java/org/elasticsearch/xpack/ml => inference/src/test/java/org/elasticsearch/xpack/inference}/mapper/SemanticTextFieldMapperTests.java (96%) rename x-pack/plugin/{ml/src/test/java/org/elasticsearch/xpack/ml => inference/src/test/java/org/elasticsearch/xpack/inference}/mapper/SemanticTextInferenceResultFieldMapperTests.java (98%) diff --git a/x-pack/plugin/inference/src/main/java/module-info.java b/x-pack/plugin/inference/src/main/java/module-info.java index 2d25a48117778..ddd56c758d67c 100644 --- a/x-pack/plugin/inference/src/main/java/module-info.java +++ b/x-pack/plugin/inference/src/main/java/module-info.java @@ -17,6 +17,7 @@ requires org.apache.httpcomponents.httpasyncclient; requires org.apache.httpcomponents.httpcore.nio; requires org.apache.lucene.core; + requires org.elasticsearch.logging; exports org.elasticsearch.xpack.inference.action; exports org.elasticsearch.xpack.inference.registry; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 4e44929e7ba9b..a83f8bb5f9b5b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -21,6 +21,8 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; import 
org.elasticsearch.features.NodeFeature; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.inference.InferenceServiceExtension; import org.elasticsearch.inference.InferenceServiceRegistry; @@ -29,6 +31,7 @@ import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.InferenceRegistryPlugin; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.rest.RestController; @@ -52,6 +55,8 @@ import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; +import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper; +import org.elasticsearch.xpack.inference.mapper.SemanticTextInferenceResultFieldMapper; import org.elasticsearch.xpack.inference.registry.ModelRegistryImpl; import org.elasticsearch.xpack.inference.rest.RestDeleteInferenceModelAction; import org.elasticsearch.xpack.inference.rest.RestGetInferenceModelAction; @@ -67,12 +72,19 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; +import java.util.Map; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; -public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin, InferenceRegistryPlugin { +public class InferencePlugin extends Plugin + implements + ActionPlugin, + ExtensiblePlugin, + SystemIndexPlugin, + InferenceRegistryPlugin, + MapperPlugin { public static final String NAME = "inference"; public static final String UTILITY_THREAD_POOL_NAME = "inference_utility"; @@ -254,4 +266,17 @@ public InferenceServiceRegistry getInferenceServiceRegistry() { public ModelRegistry getModelRegistry() { return modelRegistry.get(); } + + @Override + public Map getMappers() { + if (SemanticTextFeature.isEnabled()) { + return Map.of(SemanticTextFieldMapper.CONTENT_TYPE, SemanticTextFieldMapper.PARSER); + } + return Map.of(); + } + + @Override + public Map getMetadataMappers() { + return Map.of(SemanticTextInferenceResultFieldMapper.NAME, SemanticTextInferenceResultFieldMapper.PARSER); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/SemanticTextFeature.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/SemanticTextFeature.java similarity index 93% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/SemanticTextFeature.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/SemanticTextFeature.java index f861760803e56..4f2c5c564bcb8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/SemanticTextFeature.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/SemanticTextFeature.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.ml; +package org.elasticsearch.xpack.inference; import org.elasticsearch.common.util.FeatureFlag; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java similarity index 98% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapper.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 9546bc4ba9add..027b85a9a9f45 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.ml.mapper; +package org.elasticsearch.xpack.inference.mapper; import org.apache.lucene.search.Query; import org.elasticsearch.common.Strings; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/mapper/SemanticTextInferenceResultFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextInferenceResultFieldMapper.java similarity index 99% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/mapper/SemanticTextInferenceResultFieldMapper.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextInferenceResultFieldMapper.java index ff224522034bf..5dda6ae3781ab 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/mapper/SemanticTextInferenceResultFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextInferenceResultFieldMapper.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.ml.mapper; +package org.elasticsearch.xpack.inference.mapper; import org.apache.lucene.search.Query; import org.elasticsearch.common.Strings; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java similarity index 87% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java index 47cae14003c70..69fa64ffa6d1c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/cluster/metadata/SemanticTextClusterMetadataTests.java @@ -13,14 +13,23 @@ import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; -import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.inference.InferencePlugin; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; -public class SemanticTextClusterMetadataTests extends MlSingleNodeTestCase { +public class SemanticTextClusterMetadataTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return List.of(InferencePlugin.class); + } + public void testCreateIndexWithSemanticTextField() { final IndexService indexService = createIndex( "test", diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java similarity index 96% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapperTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index ccb8f106e4945..a3a705c9cc902 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.ml.mapper; +package org.elasticsearch.xpack.inference.mapper; import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.Strings; @@ -18,7 +18,7 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.inference.InferencePlugin; import org.junit.AssumptionViolatedException; import java.io.IOException; @@ -74,7 +74,7 @@ public void testUpdatesToModelIdNotSupported() throws IOException { @Override protected Collection getPlugins() { - return singletonList(new MachineLearning(Settings.EMPTY)); + return singletonList(new InferencePlugin(Settings.EMPTY)); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/mapper/SemanticTextInferenceResultFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextInferenceResultFieldMapperTests.java similarity index 98% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/mapper/SemanticTextInferenceResultFieldMapperTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextInferenceResultFieldMapperTests.java index bde6da7fe8277..7f13d34986482 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/mapper/SemanticTextInferenceResultFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextInferenceResultFieldMapperTests.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.ml.mapper; +package org.elasticsearch.xpack.inference.mapper; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; @@ -37,7 +37,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.inference.InferencePlugin; import java.io.IOException; import java.util.ArrayList; @@ -78,7 +78,7 @@ protected boolean isConfigurable() { @Override protected boolean isSupportedOn(IndexVersion version) { - return version.onOrAfter(IndexVersions.ES_VERSION_8_13); // TODO: Switch to ES_VERSION_8_14 when available + return version.onOrAfter(IndexVersions.ES_VERSION_8_12_1); // TODO: Switch to ES_VERSION_8_14 when available } @Override @@ -88,7 +88,7 @@ protected void registerParameters(ParameterChecker checker) throws IOException { @Override protected Collection getPlugins() { - return List.of(new MachineLearning(Settings.EMPTY)); + return List.of(new InferencePlugin(Settings.EMPTY)); } public void testSuccessfulParse() throws IOException { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 10b2ed089d632..70a3b9bab49f1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -49,8 +49,6 @@ import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.MetadataFieldMapper; import 
org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.AssociatedIndexDescriptor; import org.elasticsearch.indices.SystemIndexDescriptor; @@ -70,7 +68,6 @@ import org.elasticsearch.plugins.CircuitBreakerPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.IngestPlugin; -import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Platforms; import org.elasticsearch.plugins.Plugin; @@ -366,8 +363,6 @@ import org.elasticsearch.xpack.ml.job.process.normalizer.NormalizerProcessFactory; import org.elasticsearch.xpack.ml.job.snapshot.upgrader.SnapshotUpgradeTaskExecutor; import org.elasticsearch.xpack.ml.job.task.OpenJobPersistentTasksExecutor; -import org.elasticsearch.xpack.ml.mapper.SemanticTextFieldMapper; -import org.elasticsearch.xpack.ml.mapper.SemanticTextInferenceResultFieldMapper; import org.elasticsearch.xpack.ml.notifications.AnomalyDetectionAuditor; import org.elasticsearch.xpack.ml.notifications.DataFrameAnalyticsAuditor; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; @@ -487,8 +482,7 @@ public class MachineLearning extends Plugin PersistentTaskPlugin, SearchPlugin, ShutdownAwarePlugin, - ExtensiblePlugin, - MapperPlugin { + ExtensiblePlugin { public static final String NAME = "ml"; public static final String BASE_PATH = "/_ml/"; // Endpoints that were deprecated in 7.x can still be called in 8.x using the REST compatibility layer @@ -2298,17 +2292,4 @@ public void signalShutdown(Collection shutdownNodeIds) { mlLifeCycleService.get().signalGracefulShutdown(shutdownNodeIds); } } - - @Override - public Map getMappers() { - if (SemanticTextFeature.isEnabled()) { - return Map.of(SemanticTextFieldMapper.CONTENT_TYPE, SemanticTextFieldMapper.PARSER); - } - return Map.of(); - } - - @Override - public Map getMetadataMappers() { - return Map.of(SemanticTextInferenceResultFieldMapper.NAME, SemanticTextInferenceResultFieldMapper.PARSER); - } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java index 5af3fd527e31e..2d7832d747de4 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearning.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.XPackLicenseState; @@ -103,11 +102,6 @@ public Map> getTokeniz return mlPlugin.getTokenizers(); } - @Override - public Map getMappers() { - return mlPlugin.getMappers(); - } - /** * This is only required as we now have to have the GetRollupIndexCapsAction as a valid action in our node. * The MachineLearningLicenseTests attempt to create a datafeed referencing this LocalStateMachineLearning object. 
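Taken together, the moves in this patch consolidate mapper registration in the inference plugin. Condensed to just the `MapperPlugin` surface (the rest of `InferencePlugin` elided, and the generic types, which the patch text drops, restored per the `MapperPlugin` interface), the registration reads roughly as:

[source,java]
----
public class InferencePlugin extends Plugin
    implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin, InferenceRegistryPlugin, MapperPlugin {

    @Override
    public Map<String, Mapper.TypeParser> getMappers() {
        // semantic_text is only registered as a field type while its feature
        // flag is enabled, so disabling the flag hides the mapper without
        // removing the plugin.
        if (SemanticTextFeature.isEnabled()) {
            return Map.of(SemanticTextFieldMapper.CONTENT_TYPE, SemanticTextFieldMapper.PARSER);
        }
        return Map.of();
    }

    @Override
    public Map<String, MetadataFieldMapper.TypeParser> getMetadataMappers() {
        // The metadata field that stores inference results is registered unconditionally.
        return Map.of(SemanticTextInferenceResultFieldMapper.NAME, SemanticTextInferenceResultFieldMapper.PARSER);
    }
}
----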
From 5f325187cb7ef997e152132943b770a8b00447ea Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 6 Feb 2024 16:15:24 +0000 Subject: [PATCH 070/106] [ML] Make task_type optional (#104483) Makes the task_type element of the _inference API optional so that it is possible to GET, DELETE or POST to an inference entity without providing the task type --- docs/changelog/104483.yaml | 5 ++ .../inference/delete-inference.asciidoc | 13 ++-- .../inference/get-inference.asciidoc | 8 ++- .../inference/post-inference.asciidoc | 9 +-- .../api/inference.delete_model.json | 18 +++++- .../api/inference.get_model.json | 20 +++++-- .../api/inference.inference.json | 20 +++++-- .../api/inference.put_model.json | 20 +++++-- .../action/DeleteInferenceModelAction.java | 4 +- .../inference/action/InferenceAction.java | 11 +--- .../action/PutInferenceModelAction.java | 4 +- .../action/PutInferenceModelActionTests.java | 4 +- .../inference/InferenceBaseRestTest.java | 59 ++++++++++++++++--- .../xpack/inference/InferenceCrudIT.java | 30 +++++++++- .../inference/MockInferenceServiceIT.java | 2 +- .../TransportDeleteInferenceModelAction.java | 13 ++++ .../TransportPutInferenceModelAction.java | 42 ++++++++++++- .../xpack/inference/rest/Paths.java | 20 +++++++ .../rest/RestDeleteInferenceModelAction.java | 19 +++++- .../rest/RestGetInferenceModelAction.java | 16 +++-- .../inference/rest/RestInferenceAction.java | 19 +++++- .../rest/RestPutInferenceModelAction.java | 21 +++++-- .../action/InferenceActionRequestTests.java | 6 +- .../action/PutInferenceModelRequestTests.java | 10 ++-- ...TransportPutInferenceModelActionTests.java | 49 +++++++++++++++ .../test/inference/inference_crud.yml | 9 ++- 26 files changed, 369 insertions(+), 82 deletions(-) create mode 100644 docs/changelog/104483.yaml create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelActionTests.java diff --git a/docs/changelog/104483.yaml b/docs/changelog/104483.yaml new file mode 100644 index 0000000000000..99917b4e8e017 --- /dev/null +++ b/docs/changelog/104483.yaml @@ -0,0 +1,5 @@ +pr: 104483 +summary: Make `task_type` optional in `_inference` APIs +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/reference/inference/delete-inference.asciidoc b/docs/reference/inference/delete-inference.asciidoc index 692a96212f5ca..850b4ef1b10b0 100644 --- a/docs/reference/inference/delete-inference.asciidoc +++ b/docs/reference/inference/delete-inference.asciidoc @@ -6,9 +6,9 @@ experimental[] Deletes an {infer} model deployment. -IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, -OpenAI, or Hugging Face, in your cluster. This is not the same feature that you -can use on an ML node with custom {ml} models. If you want to train and use your +IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, +OpenAI, or Hugging Face, in your cluster. This is not the same feature that you +can use on an ML node with custom {ml} models. If you want to train and use your own model, use the <>. @@ -16,6 +16,7 @@ own model, use the <>. [[delete-inference-api-request]] ==== {api-request-title} +`DELETE /_inference/` `DELETE /_inference//` [discrete] @@ -34,7 +35,7 @@ own model, use the <>. The unique identifier of the {infer} model to delete. :: -(Required, string) +(Optional, string) The type of {infer} task that the model performs. 
@@ -42,7 +43,7 @@ The type of {infer} task that the model performs. [[delete-inference-api-example]] ==== {api-examples-title} -The following API call deletes the `my-elser-model` {infer} model that can +The following API call deletes the `my-elser-model` {infer} model that can perform `sparse_embedding` tasks. @@ -61,4 +62,4 @@ The API returns the following response: "acknowledged": true } ------------------------------------------------------------ -// NOTCONSOLE \ No newline at end of file +// NOTCONSOLE diff --git a/docs/reference/inference/get-inference.asciidoc b/docs/reference/inference/get-inference.asciidoc index 45f4cb67e7674..176909bc5458f 100644 --- a/docs/reference/inference/get-inference.asciidoc +++ b/docs/reference/inference/get-inference.asciidoc @@ -6,9 +6,9 @@ experimental[] Retrieves {infer} model information. -IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, -OpenAI, or Hugging Face, in your cluster. This is not the same feature that you -can use on an ML node with custom {ml} models. If you want to train and use your +IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, +OpenAI, or Hugging Face, in your cluster. This is not the same feature that you +can use on an ML node with custom {ml} models. If you want to train and use your own model, use the <>. @@ -18,6 +18,8 @@ own model, use the <>. `GET /_inference/_all` +`GET /_inference/` + `GET /_inference//_all` `GET /_inference//` diff --git a/docs/reference/inference/post-inference.asciidoc b/docs/reference/inference/post-inference.asciidoc index 9ef633160f162..4fb6ea5a4fb6d 100644 --- a/docs/reference/inference/post-inference.asciidoc +++ b/docs/reference/inference/post-inference.asciidoc @@ -6,9 +6,9 @@ experimental[] Performs an inference task on an input text by using an {infer} model. -IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, -OpenAI, or Hugging Face, in your cluster. This is not the same feature that you -can use on an ML node with custom {ml} models. If you want to train and use your +IMPORTANT: The {infer} APIs enable you to use certain services, such as ELSER, +OpenAI, or Hugging Face, in your cluster. This is not the same feature that you +can use on an ML node with custom {ml} models. If you want to train and use your own model, use the <>. @@ -16,6 +16,7 @@ own model, use the <>. [[post-inference-api-request]] ==== {api-request-title} +`POST /_inference/` `POST /_inference//` @@ -46,7 +47,7 @@ The unique identifier of the {infer} model. ``:: -(Required, string) +(Optional, string) The type of {infer} task that the model performs. 
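With the task type now optional, the two URL shapes are interchangeable once a model exists. A sketch with the low-level Java REST client, assuming `client` is an already-initialized `RestClient` and a `sparse_embedding` model named `my-elser-model` is stored:

[source,java]
----
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

static void getBothWays(RestClient client) throws Exception {
    // Without a task type, the server looks the model up by inference id alone.
    Response byId = client.performRequest(new Request("GET", "/_inference/my-elser-model"));

    // With a task type, the server additionally checks it against the stored
    // model and rejects the request on a mismatch (see
    // testGetModelWithWrongTaskType further down in this patch).
    Response byType = client.performRequest(new Request("GET", "/_inference/sparse_embedding/my-elser-model"));
}
----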
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete_model.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete_model.json index 555370f3193f8..b4cb5f39ff64d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete_model.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.delete_model.json @@ -12,16 +12,28 @@ "url":{ "paths":[ { - "path":"/_inference/{task_type}/{model_id}", + "path": "/_inference/{inference_id}", + "methods": [ + "DELETE" + ], + "parts": { + "inference_id": { + "type": "string", + "description": "The inference Id" + } + } + }, + { + "path":"/_inference/{task_type}/{inference_id}", "methods":[ "DELETE" ], "parts":{ "task_type":{ "type":"string", - "description":"The model task type" + "description":"The task type" }, - "model_id":{ + "inference_id":{ "type":"string", "description":"The model Id" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get_model.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get_model.json index bddc9909c0c07..f9340810e2e43 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get_model.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.get_model.json @@ -12,18 +12,30 @@ "url":{ "paths":[ { - "path":"/_inference/{task_type}/{model_id}", + "path":"/_inference/{inference_id}", + "methods":[ + "GET" + ], + "parts":{ + "inference_id":{ + "type":"string", + "description":"The inference Id" + } + } + }, + { + "path":"/_inference/{task_type}/{inference_id}", "methods":[ "GET" ], "parts":{ "task_type":{ "type":"string", - "description":"The model task type" + "description":"The task type" }, - "model_id":{ + "inference_id":{ "type":"string", - "description":"The model Id" + "description":"The inference Id" } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json index 9426d6738c374..474ca206a101b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.inference.json @@ -13,18 +13,30 @@ "url":{ "paths":[ { - "path":"/_inference/{task_type}/{model_id}", + "path":"/_inference/{inference_id}", + "methods":[ + "POST" + ], + "parts":{ + "inference_id":{ + "type":"string", + "description":"The inference Id" + } + } + }, + { + "path":"/_inference/{task_type}/{inference_id}", "methods":[ "POST" ], "parts":{ "task_type":{ "type":"string", - "description":"The model task type" + "description":"The task type" }, - "model_id":{ + "inference_id":{ "type":"string", - "description":"The model Id" + "description":"The inference Id" } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_model.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_model.json index 26ba9ddb00608..4c2856c342088 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_model.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.put_model.json @@ -13,18 +13,30 @@ "url":{ "paths":[ { - "path":"/_inference/{task_type}/{model_id}", + "path":"/_inference/{inference_id}", + "methods":[ + "PUT" + ], + "parts":{ + "inference_id":{ + "type":"string", + "description":"The inference Id" + } + } + }, + { + "path":"/_inference/{task_type}/{inference_id}", "methods":[ "PUT" ], "parts":{ "task_type":{ "type":"string", - 
"description":"The model task type" + "description":"The task type" }, - "model_id":{ + "inference_id":{ "type":"string", - "description":"The model Id" + "description":"The inference Id" } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceModelAction.java index e8dd9bb7b21b3..e09dcfbb3df10 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/DeleteInferenceModelAction.java @@ -31,9 +31,9 @@ public static class Request extends AcknowledgedRequest p.mapOrdered(), TASK_SETTINGS); } - public static Request parseRequest(String inferenceEntityId, String taskType, XContentParser parser) { + public static Request parseRequest(String inferenceEntityId, TaskType taskType, XContentParser parser) { Request.Builder builder = PARSER.apply(parser, null); builder.setInferenceEntityId(inferenceEntityId); builder.setTaskType(taskType); @@ -197,13 +197,8 @@ public Builder setInferenceEntityId(String inferenceEntityId) { return this; } - public Builder setTaskType(String taskTypeStr) { - try { - TaskType taskType = TaskType.fromString(taskTypeStr); - this.taskType = Objects.requireNonNull(taskType); - } catch (IllegalArgumentException e) { - throw new ElasticsearchStatusException("Unknown task_type [{}]", RestStatus.BAD_REQUEST, taskTypeStr); - } + public Builder setTaskType(TaskType taskType) { + this.taskType = taskType; return this; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java index 170bff5a1908a..c61fd52ba10e2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java @@ -42,8 +42,8 @@ public static class Request extends AcknowledgedRequest { private final BytesReference content; private final XContentType contentType; - public Request(String taskType, String inferenceEntityId, BytesReference content, XContentType contentType) { - this.taskType = TaskType.fromStringOrStatusException(taskType); + public Request(TaskType taskType, String inferenceEntityId, BytesReference content, XContentType contentType) { + this.taskType = taskType; this.inferenceEntityId = inferenceEntityId; this.content = content; this.contentType = contentType; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelActionTests.java index 10f35bf33f631..e0b04c6fe8769 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelActionTests.java @@ -19,14 +19,14 @@ import java.util.Locale; public class PutInferenceModelActionTests extends ESTestCase { - public static String TASK_TYPE; + public static TaskType TASK_TYPE; public static String MODEL_ID; public static XContentType X_CONTENT_TYPE; public static BytesReference BYTES; 
@Before public void setup() throws Exception { - TASK_TYPE = TaskType.ANY.toString(); + TASK_TYPE = TaskType.SPARSE_EMBEDDING; MODEL_ID = randomAlphaOfLengthBetween(1, 10).toLowerCase(Locale.ROOT); X_CONTENT_TYPE = randomFrom(XContentType.values()); BYTES = new BytesArray(randomAlphaOfLengthBetween(1, 10)); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index 1578e03608e82..1fe011acc44a3 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; @@ -50,8 +51,14 @@ protected Settings restClientSettings() { } static String mockServiceModelConfig() { - return """ + return mockServiceModelConfig(null); + } + + static String mockServiceModelConfig(@Nullable TaskType taskTypeInBody) { + var taskType = taskTypeInBody == null ? "" : "\"task_type\": \"" + taskTypeInBody + "\","; + return Strings.format(""" { + %s "service": "test_service", "service_settings": { "model": "my_model", @@ -61,11 +68,35 @@ static String mockServiceModelConfig() { "temperature": 3 } } - """; + """, taskType); + } + + protected void deleteModel(String modelId) throws IOException { + var request = new Request("DELETE", "_inference/" + modelId); + var response = client().performRequest(request); + assertOkOrCreated(response); + } + + protected void deleteModel(String modelId, TaskType taskType) throws IOException { + var request = new Request("DELETE", Strings.format("_inference/%s/%s", taskType, modelId)); + var response = client().performRequest(request); + assertOkOrCreated(response); } protected Map putModel(String modelId, String modelConfig, TaskType taskType) throws IOException { String endpoint = Strings.format("_inference/%s/%s", taskType, modelId); + return putModelInternal(endpoint, modelConfig); + } + + /** + * Task type should be in modelConfig + */ + protected Map putModel(String modelId, String modelConfig) throws IOException { + String endpoint = Strings.format("_inference/%s", modelId); + return putModelInternal(endpoint, modelConfig); + } + + private Map putModelInternal(String endpoint, String modelConfig) throws IOException { var request = new Request("PUT", endpoint); request.setJsonEntity(modelConfig); var response = client().performRequest(request); @@ -73,24 +104,38 @@ protected Map putModel(String modelId, String modelConfig, TaskT return entityAsMap(response); } + protected Map getModel(String modelId) throws IOException { + var endpoint = Strings.format("_inference/%s", modelId); + return getAllModelInternal(endpoint); + } + protected Map getModels(String modelId, TaskType taskType) throws IOException { var endpoint = Strings.format("_inference/%s/%s", taskType, modelId); - var request = new Request("GET", endpoint); - var response = 
client().performRequest(request); - assertOkOrCreated(response); - return entityAsMap(response); + return getAllModelInternal(endpoint); } protected Map getAllModels() throws IOException { - var endpoint = Strings.format("_inference/_all"); + return getAllModelInternal("_inference/_all"); + } + + private Map getAllModelInternal(String endpoint) throws IOException { var request = new Request("GET", endpoint); var response = client().performRequest(request); assertOkOrCreated(response); return entityAsMap(response); } + protected Map inferOnMockService(String modelId, List input) throws IOException { + var endpoint = Strings.format("_inference/%s", modelId); + return inferOnMockServiceInternal(endpoint, input); + } + protected Map inferOnMockService(String modelId, TaskType taskType, List input) throws IOException { var endpoint = Strings.format("_inference/%s/%s", taskType, modelId); + return inferOnMockServiceInternal(endpoint, input); + } + + private Map inferOnMockServiceInternal(String endpoint, List input) throws IOException { var request = new Request("POST", endpoint); var bodyBuilder = new StringBuilder("{\"input\": ["); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 84b6bb94503c3..82e8e5aedc378 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -48,6 +48,13 @@ public void testGet() throws IOException { var singleModel = (List>) getModels("se_model_1", TaskType.SPARSE_EMBEDDING).get("models"); assertThat(singleModel, hasSize(1)); assertEquals("se_model_1", singleModel.get(0).get("model_id")); + + for (int i = 0; i < 5; i++) { + deleteModel("se_model_" + i, TaskType.SPARSE_EMBEDDING); + } + for (int i = 0; i < 4; i++) { + deleteModel("te_model_" + i, TaskType.TEXT_EMBEDDING); + } } public void testGetModelWithWrongTaskType() throws IOException { @@ -59,13 +66,34 @@ public void testGetModelWithWrongTaskType() throws IOException { ); } + public void testDeleteModelWithWrongTaskType() throws IOException { + putModel("sparse_embedding_model", mockServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + var e = expectThrows(ResponseException.class, () -> deleteModel("sparse_embedding_model", TaskType.TEXT_EMBEDDING)); + assertThat( + e.getMessage(), + containsString("Requested task type [text_embedding] does not match the model's task type [sparse_embedding]") + ); + } + @SuppressWarnings("unchecked") public void testGetModelWithAnyTaskType() throws IOException { String modelId = "sparse_embedding_model"; putModel(modelId, mockServiceModelConfig(), TaskType.SPARSE_EMBEDDING); var singleModel = (List>) getModels(modelId, TaskType.ANY).get("models"); - System.out.println("MODEL" + singleModel); assertEquals(modelId, singleModel.get(0).get("model_id")); assertEquals(TaskType.SPARSE_EMBEDDING.toString(), singleModel.get(0).get("task_type")); } + + @SuppressWarnings("unchecked") + public void testApisWithoutTaskType() throws IOException { + String modelId = "no_task_type_in_url"; + putModel(modelId, mockServiceModelConfig(TaskType.SPARSE_EMBEDDING)); + var singleModel = (List>) getModel(modelId).get("models"); + assertEquals(modelId, 
singleModel.get(0).get("model_id")); + assertEquals(TaskType.SPARSE_EMBEDDING.toString(), singleModel.get(0).get("task_type")); + + var inference = inferOnMockService(modelId, List.of(randomAlphaOfLength(10))); + assertNonEmptyInferenceResults(inference, 1, TaskType.SPARSE_EMBEDDING); + deleteModel(modelId); + } } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java index f8abfd45a8566..1629f4c845ec2 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java @@ -29,7 +29,7 @@ public void testMockService() throws IOException { } // The response is randomly generated, the input can be anything - var inference = inferOnMockService(modelId, TaskType.SPARSE_EMBEDDING, List.of(randomAlphaOfLength(10))); + var inference = inferOnMockService(modelId, List.of(randomAlphaOfLength(10))); assertNonEmptyInferenceResults(inference, 1, TaskType.SPARSE_EMBEDDING); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java index 9b110f7b8e7a4..ad6042581f264 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportDeleteInferenceModelAction.java @@ -71,6 +71,19 @@ protected void masterOperation( SubscribableListener.newForked(modelConfigListener -> { modelRegistry.getModel(request.getInferenceEntityId(), modelConfigListener); }).andThen((l1, unparsedModel) -> { + + if (request.getTaskType().isAnyOrSame(unparsedModel.taskType()) == false) { + // specific task type in request does not match the models + l1.onFailure( + new ElasticsearchStatusException( + "Requested task type [{}] does not match the model's task type [{}]", + RestStatus.BAD_REQUEST, + request.getTaskType(), + unparsedModel.taskType() + ) + ); + return; + } var service = serviceRegistry.getService(unparsedModel.service()); if (service.isPresent()) { service.get().stop(request.getInferenceEntityId(), l1); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index f94da64558132..b2a30a3b7e931 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -96,6 +96,8 @@ protected void masterOperation( ) throws Exception { var requestAsMap = requestToMap(request); + var resolvedTaskType = resolveTaskType(request.getTaskType(), (String) requestAsMap.remove(TaskType.NAME)); + String serviceName = (String) requestAsMap.remove(ModelConfigurations.SERVICE); if (serviceName == null) { listener.onFailure(new ElasticsearchStatusException("Model configuration is 
missing a service", RestStatus.BAD_REQUEST)); @@ -151,7 +153,7 @@ protected void masterOperation( parseAndStoreModel( service.get(), request.getInferenceEntityId(), - request.getTaskType(), + resolvedTaskType, requestAsMap, // In Elastic cloud ml nodes run on Linux x86 Set.of("linux-x86_64"), @@ -162,7 +164,7 @@ protected void masterOperation( parseAndStoreModel( service.get(), request.getInferenceEntityId(), - request.getTaskType(), + resolvedTaskType, requestAsMap, architectures, delegate @@ -171,7 +173,7 @@ protected void masterOperation( }), client, threadPool.executor(InferencePlugin.UTILITY_THREAD_POOL_NAME)); } else { // Not an in cluster service, it does not care about the cluster platform - parseAndStoreModel(service.get(), request.getInferenceEntityId(), request.getTaskType(), requestAsMap, Set.of(), listener); + parseAndStoreModel(service.get(), request.getInferenceEntityId(), resolvedTaskType, requestAsMap, Set.of(), listener); } } @@ -235,4 +237,38 @@ static boolean clusterIsInElasticCloud(ClusterSettings settings) { // One such heuristic is where USE_AUTO_MACHINE_MEMORY_PERCENT == true return settings.get(MachineLearningField.USE_AUTO_MACHINE_MEMORY_PERCENT); } + + /** + * task_type can be specified as either a URL parameter or in the + * request body. Resolve which to use or throw if the settings are + * inconsistent + * @param urlTaskType Taken from the URL parameter. ANY means not specified. + * @param bodyTaskType Taken from the request body. Maybe null + * @return The resolved task type + */ + static TaskType resolveTaskType(TaskType urlTaskType, String bodyTaskType) { + if (bodyTaskType == null) { + if (urlTaskType == TaskType.ANY) { + throw new ElasticsearchStatusException("model is missing required setting [task_type]", RestStatus.BAD_REQUEST); + } else { + return urlTaskType; + } + } + + TaskType parsedBodyTask = TaskType.fromStringOrStatusException(bodyTaskType); + if (parsedBodyTask == TaskType.ANY) { + throw new ElasticsearchStatusException("task_type [any] is not valid type for inference", RestStatus.BAD_REQUEST); + } + + if (parsedBodyTask.isAnyOrSame(urlTaskType) == false) { + throw new ElasticsearchStatusException( + "Cannot resolve conflicting task_type parameter in the request URL [{}] and the request body [{}]", + RestStatus.BAD_REQUEST, + urlTaskType.toString(), + bodyTaskType + ); + } + + return parsedBodyTask; + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java new file mode 100644 index 0000000000000..1fc67d379a703 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/Paths.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.rest; + +public final class Paths { + + static final String INFERENCE_ID = "inference_id"; + static final String TASK_TYPE_OR_INFERENCE_ID = "task_type_or_id"; + static final String INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}"; + static final String TASK_TYPE_INFERENCE_ID_PATH = "_inference/{" + TASK_TYPE_OR_INFERENCE_ID + "}/{" + INFERENCE_ID + "}"; + + private Paths() { + + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java index 9efecf1d28024..985a3c5045c3a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestDeleteInferenceModelAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.rest; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -18,9 +19,14 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.DELETE; +import static org.elasticsearch.xpack.inference.rest.Paths.INFERENCE_ID; +import static org.elasticsearch.xpack.inference.rest.Paths.INFERENCE_ID_PATH; +import static org.elasticsearch.xpack.inference.rest.Paths.TASK_TYPE_INFERENCE_ID_PATH; +import static org.elasticsearch.xpack.inference.rest.Paths.TASK_TYPE_OR_INFERENCE_ID; @ServerlessScope(Scope.PUBLIC) public class RestDeleteInferenceModelAction extends BaseRestHandler { + @Override public String getName() { return "delete_inference_model_action"; @@ -28,13 +34,20 @@ public String getName() { @Override public List routes() { - return List.of(new Route(DELETE, "_inference/{task_type}/{model_id}")); + return List.of(new Route(DELETE, INFERENCE_ID_PATH), new Route(DELETE, TASK_TYPE_INFERENCE_ID_PATH)); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { - String taskType = restRequest.param("task_type"); - String inferenceEntityId = restRequest.param("model_id"); + String inferenceEntityId; + TaskType taskType; + if (restRequest.hasParam(INFERENCE_ID)) { + inferenceEntityId = restRequest.param(INFERENCE_ID); + taskType = TaskType.fromStringOrStatusException(restRequest.param(TASK_TYPE_OR_INFERENCE_ID)); + } else { + inferenceEntityId = restRequest.param(TASK_TYPE_OR_INFERENCE_ID); + taskType = TaskType.ANY; + } var request = new DeleteInferenceModelAction.Request(inferenceEntityId, taskType); return channel -> client.execute(DeleteInferenceModelAction.INSTANCE, request, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java index 310c9b6dd6f95..4de6ff7af1f15 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestGetInferenceModelAction.java @@ -19,9 +19,14 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; +import static 
org.elasticsearch.xpack.inference.rest.Paths.INFERENCE_ID; +import static org.elasticsearch.xpack.inference.rest.Paths.INFERENCE_ID_PATH; +import static org.elasticsearch.xpack.inference.rest.Paths.TASK_TYPE_INFERENCE_ID_PATH; +import static org.elasticsearch.xpack.inference.rest.Paths.TASK_TYPE_OR_INFERENCE_ID; @ServerlessScope(Scope.PUBLIC) public class RestGetInferenceModelAction extends BaseRestHandler { + @Override public String getName() { return "get_inference_model_action"; @@ -29,20 +34,23 @@ public String getName() { @Override public List routes() { - return List.of(new Route(GET, "_inference/{task_type}/{model_id}"), new Route(GET, "_inference/_all")); + return List.of(new Route(GET, "_inference/_all"), new Route(GET, INFERENCE_ID_PATH), new Route(GET, TASK_TYPE_INFERENCE_ID_PATH)); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { String inferenceEntityId = null; TaskType taskType = null; - if (restRequest.hasParam("task_type") == false && restRequest.hasParam("model_id") == false) { + if (restRequest.hasParam(TASK_TYPE_OR_INFERENCE_ID) == false && restRequest.hasParam(INFERENCE_ID) == false) { // _all models request inferenceEntityId = "_all"; taskType = TaskType.ANY; + } else if (restRequest.hasParam(INFERENCE_ID)) { + inferenceEntityId = restRequest.param(INFERENCE_ID); + taskType = TaskType.fromStringOrStatusException(restRequest.param(TASK_TYPE_OR_INFERENCE_ID)); } else { - taskType = TaskType.fromStringOrStatusException(restRequest.param("task_type")); - inferenceEntityId = restRequest.param("model_id"); + inferenceEntityId = restRequest.param(TASK_TYPE_OR_INFERENCE_ID); + taskType = TaskType.ANY; } var request = new GetInferenceModelAction.Request(inferenceEntityId, taskType); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java index ff1931438dda9..309355d9401a3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.rest; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -19,6 +20,10 @@ import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.inference.rest.Paths.INFERENCE_ID; +import static org.elasticsearch.xpack.inference.rest.Paths.INFERENCE_ID_PATH; +import static org.elasticsearch.xpack.inference.rest.Paths.TASK_TYPE_INFERENCE_ID_PATH; +import static org.elasticsearch.xpack.inference.rest.Paths.TASK_TYPE_OR_INFERENCE_ID; @ServerlessScope(Scope.PUBLIC) public class RestInferenceAction extends BaseRestHandler { @@ -29,13 +34,21 @@ public String getName() { @Override public List routes() { - return List.of(new Route(POST, "_inference/{task_type}/{model_id}")); + return List.of(new Route(POST, INFERENCE_ID_PATH), new Route(POST, TASK_TYPE_INFERENCE_ID_PATH)); } @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - String taskType = restRequest.param("task_type"); - String inferenceEntityId = 
restRequest.param("model_id"); + String inferenceEntityId; + TaskType taskType; + if (restRequest.hasParam(INFERENCE_ID)) { + inferenceEntityId = restRequest.param(INFERENCE_ID); + taskType = TaskType.fromStringOrStatusException(restRequest.param(TASK_TYPE_OR_INFERENCE_ID)); + } else { + inferenceEntityId = restRequest.param(TASK_TYPE_OR_INFERENCE_ID); + taskType = TaskType.ANY; + } + try (var parser = restRequest.contentParser()) { var request = InferenceAction.Request.parseRequest(inferenceEntityId, taskType, parser); return channel -> client.execute(InferenceAction.INSTANCE, request, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java index 9a1f15f08a89a..0523160ee19c2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestPutInferenceModelAction.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.inference.rest; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -15,10 +16,13 @@ import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.inference.action.PutInferenceModelAction; -import java.io.IOException; import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.PUT; +import static org.elasticsearch.xpack.inference.rest.Paths.INFERENCE_ID; +import static org.elasticsearch.xpack.inference.rest.Paths.INFERENCE_ID_PATH; +import static org.elasticsearch.xpack.inference.rest.Paths.TASK_TYPE_INFERENCE_ID_PATH; +import static org.elasticsearch.xpack.inference.rest.Paths.TASK_TYPE_OR_INFERENCE_ID; @ServerlessScope(Scope.PUBLIC) public class RestPutInferenceModelAction extends BaseRestHandler { @@ -29,13 +33,20 @@ public String getName() { @Override public List routes() { - return List.of(new Route(PUT, "_inference/{task_type}/{model_id}")); + return List.of(new Route(PUT, INFERENCE_ID_PATH), new Route(PUT, TASK_TYPE_INFERENCE_ID_PATH)); } @Override - protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { - String taskType = restRequest.param("task_type"); - String inferenceEntityId = restRequest.param("model_id"); + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + String inferenceEntityId; + TaskType taskType; + if (restRequest.hasParam(INFERENCE_ID)) { + inferenceEntityId = restRequest.param(INFERENCE_ID); + taskType = TaskType.fromStringOrStatusException(restRequest.param(TASK_TYPE_OR_INFERENCE_ID)); + } else { + inferenceEntityId = restRequest.param(TASK_TYPE_OR_INFERENCE_ID); + taskType = TaskType.ANY; // task type must be defined in the body + } var request = new PutInferenceModelAction.Request( taskType, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java index 396af55ce5616..dd7be116d8175 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/InferenceActionRequestTests.java @@ -51,7 +51,7 @@ public void testParsing() throws IOException { } """; try (var parser = createParser(JsonXContent.jsonXContent, singleInputRequest)) { - var request = InferenceAction.Request.parseRequest("model_id", "sparse_embedding", parser); + var request = InferenceAction.Request.parseRequest("model_id", TaskType.SPARSE_EMBEDDING, parser); assertThat(request.getInput(), contains("single text input")); } @@ -61,7 +61,7 @@ public void testParsing() throws IOException { } """; try (var parser = createParser(JsonXContent.jsonXContent, multiInputRequest)) { - var request = InferenceAction.Request.parseRequest("model_id", "sparse_embedding", parser); + var request = InferenceAction.Request.parseRequest("model_id", TaskType.ANY, parser); assertThat(request.getInput(), contains("an array", "of", "inputs")); } } @@ -73,7 +73,7 @@ public void testParseRequest_DefaultsInputTypeToIngest() throws IOException { } """; try (var parser = createParser(JsonXContent.jsonXContent, singleInputRequest)) { - var request = InferenceAction.Request.parseRequest("model_id", "sparse_embedding", parser); + var request = InferenceAction.Request.parseRequest("model_id", TaskType.SPARSE_EMBEDDING, parser); assertThat(request.getInputType(), is(InputType.UNSPECIFIED)); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java index d69cc58dc0871..f61398fcacacf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/PutInferenceModelRequestTests.java @@ -22,7 +22,7 @@ protected Writeable.Reader instanceReader() { @Override protected PutInferenceModelAction.Request createTestInstance() { return new PutInferenceModelAction.Request( - randomFrom(TaskType.values()).toString(), + randomFrom(TaskType.values()), randomAlphaOfLength(6), randomBytesReference(50), randomFrom(XContentType.values()) @@ -33,25 +33,25 @@ protected PutInferenceModelAction.Request createTestInstance() { protected PutInferenceModelAction.Request mutateInstance(PutInferenceModelAction.Request instance) { return switch (randomIntBetween(0, 3)) { case 0 -> new PutInferenceModelAction.Request( - TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length].toString(), + TaskType.values()[(instance.getTaskType().ordinal() + 1) % TaskType.values().length], instance.getInferenceEntityId(), instance.getContent(), instance.getContentType() ); case 1 -> new PutInferenceModelAction.Request( - instance.getTaskType().toString(), + instance.getTaskType(), instance.getInferenceEntityId() + "foo", instance.getContent(), instance.getContentType() ); case 2 -> new PutInferenceModelAction.Request( - instance.getTaskType().toString(), + instance.getTaskType(), instance.getInferenceEntityId(), randomBytesReference(instance.getContent().length() + 1), instance.getContentType() ); case 3 -> new PutInferenceModelAction.Request( - instance.getTaskType().toString(), + instance.getTaskType(), instance.getInferenceEntityId(), instance.getContent(), 
XContentType.values()[(instance.getContentType().ordinal() + 1) % XContentType.values().length] diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelActionTests.java new file mode 100644 index 0000000000000..27e56c1bd973d --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelActionTests.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.action; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; + +public class TransportPutInferenceModelActionTests extends ESTestCase { + + public void testResolveTaskType() { + + assertEquals(TaskType.SPARSE_EMBEDDING, TransportPutInferenceModelAction.resolveTaskType(TaskType.SPARSE_EMBEDDING, null)); + assertEquals( + TaskType.SPARSE_EMBEDDING, + TransportPutInferenceModelAction.resolveTaskType(TaskType.ANY, TaskType.SPARSE_EMBEDDING.toString()) + ); + + var e = expectThrows( + ElasticsearchStatusException.class, + () -> TransportPutInferenceModelAction.resolveTaskType(TaskType.ANY, null) + ); + assertThat(e.getMessage(), containsString("model is missing required setting [task_type]")); + + e = expectThrows( + ElasticsearchStatusException.class, + () -> TransportPutInferenceModelAction.resolveTaskType(TaskType.ANY, TaskType.ANY.toString()) + ); + assertThat(e.getMessage(), containsString("task_type [any] is not valid type for inference")); + + e = expectThrows( + ElasticsearchStatusException.class, + () -> TransportPutInferenceModelAction.resolveTaskType(TaskType.SPARSE_EMBEDDING, TaskType.TEXT_EMBEDDING.toString()) + ); + assertThat( + e.getMessage(), + containsString( + "Cannot resolve conflicting task_type parameter in the request URL [sparse_embedding] and the request body [text_embedding]" + ) + ); + } +} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml index f33201ff53ae6..39a107373c8a3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/inference/inference_crud.yml @@ -3,8 +3,7 @@ - do: catch: missing inference.get_model: - task_type: sparse_embedding - model_id: model_to_get + inference_id: model_to_get - match: { error.type: "resource_not_found_exception" } - match: { error.reason: "Model not found [model_to_get]" } @@ -13,10 +12,10 @@ - do: catch: bad_request inference.put_model: - task_type: bad - model_id: elser_model + inference_id: elser_model body: > { + "task_type": "bad", "service": "elser", "service_settings": { "num_allocations": 1, @@ -33,7 +32,7 @@ catch: bad_request inference.inference: task_type: bad - model_id: elser_model + inference_id: elser_model body: > { "input": "important text" From e8288fbaa8ac1c56ad20ce1a77f66acfafc26640 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Tue, 6 Feb 2024 16:20:46 
+0000 Subject: [PATCH 071/106] [ML] Improve docs around ML nodes and xpack.ml.enabled (#105199) Since these docs were originally written there have been a couple of changes: 1. We now support aarch64 as well as x86_64, so the SSE4.2 guidance needed clarification. 2. ML is more deeply embedded into Elasticsearch functionality across nodes that are not ML nodes. For example, ingest pipelines now routinely use ML, and, in the near future, index mappings will too in the form of semantic text. Although we cannot mandate that xpack.ml.enabled is set uniformly across the cluster, as that would be a breaking change, we should say ever more strongly that ML must be enabled on all nodes if all ML functionality is to work correctly. The primary reason for wanting to disable ML is hardware incompatibility, and if ML is disabled for that reason then it should not be used at all. --- docs/reference/settings/ml-settings.asciidoc | 22 ++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/docs/reference/settings/ml-settings.asciidoc b/docs/reference/settings/ml-settings.asciidoc index 2ac248d5ea8e7..1077a63b00249 100644 --- a/docs/reference/settings/ml-settings.asciidoc +++ b/docs/reference/settings/ml-settings.asciidoc @@ -10,9 +10,12 @@ // tag::ml-settings-description-tag[] You do not need to configure any settings to use {ml}. It is enabled by default. -IMPORTANT: {ml-cap} uses SSE4.2 instructions, so it works only on machines whose -CPUs {wikipedia}/SSE4#Supporting_CPUs[support] SSE4.2. If you run {es} on older -hardware, you must disable {ml} (by setting `xpack.ml.enabled` to `false`). +IMPORTANT: {ml-cap} uses SSE4.2 instructions on x86_64 machines, so it works only +on x86_64 machines whose CPUs {wikipedia}/SSE4#Supporting_CPUs[support] SSE4.2. +(This limitation does not apply to aarch64 machines.) If you run {es} on older +x86_64 hardware, you must disable {ml} (by setting `xpack.ml.enabled` to `false`). +In this situation you should not attempt to use {ml} functionality in your cluster +at all. // end::ml-settings-description-tag[] @@ -46,7 +49,18 @@ that you use the default value for this setting on all nodes. + If set to `false`, the {ml} APIs are disabled on the node. For example, the node cannot open jobs, start {dfeeds}, receive transport (internal) communication -requests, or requests from clients (including {kib}) related to {ml} APIs. +requests, or requests from clients (including {kib}) related to {ml} APIs. If +`xpack.ml.enabled` is not set uniformly across all nodes in your cluster then you +are likely to experience problems with {ml} functionality not fully working. ++ +You must not use any {ml} functionality from ingest pipelines if `xpack.ml.enabled` +is `false` on any node. Before setting `xpack.ml.enabled` to `false` on a node, +consider whether you really meant to just exclude `ml` from the `node.roles`. +Excluding `ml` from the <> will stop the node from +running {ml} jobs and NLP models, but it will still be aware that {ml} functionality +exists. Setting `xpack.ml.enabled` to `false` should be reserved for situations +where you cannot use {ml} functionality at all in your cluster due to hardware +limitations as described <>. `xpack.ml.inference_model.cache_size`:: (<>) The maximum inference cache size allowed. 
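[Editor's note: the next patch in the series, 4d5416912 below, replaces eagerly built per-bucket ArrayList copies with a lazy AbstractList view (the BucketAggregationList helper it adds to InternalMultiBucketAggregation). The following standalone sketch illustrates that pattern in isolation; it is a minimal sketch, not the actual Elasticsearch code, and Bucket, Aggregations, and LazyViewSketch are hypothetical stand-ins for the real org.elasticsearch types.]

import java.util.AbstractList;
import java.util.List;

// Minimal sketch of the lazy-view pattern used in the patch below. "Bucket"
// and "Aggregations" are hypothetical stand-ins, not the real
// org.elasticsearch classes; the point is that the view allocates no
// per-element storage and materializes nothing up front.
public class LazyViewSketch {

    record Aggregations(String name) {}

    record Bucket(long docCount, Aggregations aggregations) {}

    // Read-only List view exposing each bucket's aggregations on demand.
    static final class BucketAggregationList extends AbstractList<Aggregations> {
        private final List<Bucket> buckets;

        BucketAggregationList(List<Bucket> buckets) {
            this.buckets = buckets;
        }

        @Override
        public Aggregations get(int index) {
            // Resolved on access; nothing is copied into a backing array.
            return buckets.get(index).aggregations();
        }

        @Override
        public int size() {
            return buckets.size();
        }
    }

    public static void main(String[] args) {
        List<Bucket> buckets = List.of(
            new Bucket(3, new Aggregations("a")),
            new Bucket(7, new Aggregations("b"))
        );
        // A reducer can iterate this exactly like a materialized ArrayList,
        // but no intermediate list of aggregations is ever built.
        List<Aggregations> view = new BucketAggregationList(buckets);
        view.forEach(a -> System.out.println(a.name()));
    }
}

The trade-off is that get(int) re-derives the element on every access; that costs nothing in the reduce path, which walks the list once, and it saves one ArrayList allocation plus the element copies for every bucket reduced.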
From 4d5416912b3bd3735f75176b89115b8874c095e4 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Tue, 6 Feb 2024 17:53:41 +0100 Subject: [PATCH 072/106] Use an AbstractList to build the AggregationList for reduction (#105200) We are building a list of InternalAggregations from a list of Buckets, therefore we can use an AbstractList to create the actual list and save some allocations. --- .../adjacency/InternalAdjacencyMatrix.java | 7 +++---- .../histogram/InternalAutoDateHistogram.java | 7 +++---- .../bucket/timeseries/InternalTimeSeries.java | 5 ++--- .../InternalMultiBucketAggregation.java | 20 +++++++++++++++++++ .../bucket/composite/InternalComposite.java | 9 ++++----- .../bucket/filter/InternalFilters.java | 7 +++---- .../bucket/geogrid/InternalGeoGrid.java | 7 +++---- .../histogram/InternalDateHistogram.java | 7 +++---- .../bucket/histogram/InternalHistogram.java | 7 +++---- .../InternalVariableWidthHistogram.java | 9 ++++----- .../bucket/prefix/InternalIpPrefix.java | 7 +++---- .../bucket/range/InternalBinaryRange.java | 6 +++--- .../bucket/range/InternalRange.java | 7 +++---- .../bucket/terms/AbstractInternalTerms.java | 7 +++---- .../bucket/terms/InternalRareTerms.java | 8 +++----- .../terms/InternalSignificantTerms.java | 7 +++---- 16 files changed, 66 insertions(+), 61 deletions(-) diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java index c17cc004e25b5..745585901311a 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java @@ -205,18 +205,17 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { @Override protected InternalBucket reduceBucket(List buckets, AggregationReduceContext context) { - assert buckets.size() > 0; + assert buckets.isEmpty() == false; InternalBucket reduced = null; - List aggregationsList = new ArrayList<>(buckets.size()); for (InternalBucket bucket : buckets) { if (reduced == null) { reduced = new InternalBucket(bucket.key, bucket.docCount, bucket.aggregations); } else { reduced.docCount += bucket.docCount; } - aggregationsList.add(bucket.aggregations); } - reduced.aggregations = InternalAggregations.reduce(aggregationsList, context); + final List aggregations = new BucketAggregationList<>(buckets); + reduced.aggregations = InternalAggregations.reduce(aggregations, context); return reduced; } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java index de36a9721fe38..78f6d67b0f748 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -410,14 +410,13 @@ private List mergeBuckets( @Override protected Bucket reduceBucket(List buckets, AggregationReduceContext context) { - assert buckets.size() > 0; - List aggregations = new ArrayList<>(buckets.size()); + assert buckets.isEmpty() == false; long docCount = 0; for (Bucket bucket : buckets) { docCount += bucket.docCount; - 
aggregations.add(bucket.getAggregations()); } - InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); + final List aggregations = new BucketAggregationList<>(buckets); + final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); return new InternalAutoDateHistogram.Bucket(buckets.get(0).key, docCount, format, aggs); } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java index 67a7773fd01bb..725bd5673bccf 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java @@ -255,16 +255,15 @@ public InternalBucket createBucket(InternalAggregations aggregations, InternalBu @Override protected InternalBucket reduceBucket(List buckets, AggregationReduceContext context) { InternalTimeSeries.InternalBucket reduced = null; - List aggregationsList = new ArrayList<>(buckets.size()); for (InternalTimeSeries.InternalBucket bucket : buckets) { if (reduced == null) { reduced = new InternalTimeSeries.InternalBucket(bucket.key, bucket.docCount, bucket.aggregations, bucket.keyed); } else { reduced.docCount += bucket.docCount; } - aggregationsList.add(bucket.aggregations); } - reduced.aggregations = InternalAggregations.reduce(aggregationsList, context); + final List aggregations = new BucketAggregationList<>(buckets); + reduced.aggregations = InternalAggregations.reduce(aggregations, context); return reduced; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java index dda632e7aa020..8f6987dfa6be1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java @@ -15,6 +15,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; import java.io.IOException; +import java.util.AbstractList; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -77,6 +78,25 @@ protected InternalMultiBucketAggregation(StreamInput in) throws IOException { */ protected abstract B reduceBucket(List buckets, AggregationReduceContext context); + /** Helps to lazily construct the aggregation list for reduction */ + protected static class BucketAggregationList extends AbstractList { + private final List buckets; + + public BucketAggregationList(List buckets) { + this.buckets = buckets; + } + + @Override + public InternalAggregations get(int index) { + return buckets.get(index).getAggregations(); + } + + @Override + public int size() { + return buckets.size(); + } + } + @Override public abstract List getBuckets(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 922baf1f83f83..e9dc079edaf14 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -283,19 +283,18 @@ 
public InternalAggregation finalizeSampling(SamplingContext samplingContext) { @Override protected InternalBucket reduceBucket(List buckets, AggregationReduceContext context) { - assert buckets.size() > 0; - List aggregations = new ArrayList<>(buckets.size()); + assert buckets.isEmpty() == false; long docCount = 0; for (InternalBucket bucket : buckets) { docCount += bucket.docCount; - aggregations.add(bucket.aggregations); } - InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); + final List aggregations = new BucketAggregationList<>(buckets); + final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); /* Use the formats from the bucket because they'll be right to format * the key. The formats on the InternalComposite doing the reducing are * just whatever formats make sense for *its* index. This can be real * trouble when the index doing the reducing is unmapped. */ - var reducedFormats = buckets.get(0).formats; + final var reducedFormats = buckets.get(0).formats; return new InternalBucket(sourceNames, reducedFormats, buckets.get(0).key, reverseMuls, missingOrders, docCount, aggs); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java index 726589ca7c1b5..8ae5aed72a3a5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java @@ -238,18 +238,17 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { @Override protected InternalBucket reduceBucket(List buckets, AggregationReduceContext context) { - assert buckets.size() > 0; + assert buckets.isEmpty() == false; InternalBucket reduced = null; - List aggregationsList = new ArrayList<>(buckets.size()); for (InternalBucket bucket : buckets) { if (reduced == null) { reduced = new InternalBucket(bucket.key, bucket.docCount, bucket.aggregations, bucket.keyed, keyedBucket); } else { reduced.docCount += bucket.docCount; } - aggregationsList.add(bucket.aggregations); } - reduced.aggregations = InternalAggregations.reduce(aggregationsList, context); + final List aggregations = new BucketAggregationList<>(buckets); + reduced.aggregations = InternalAggregations.reduce(aggregations, context); return reduced; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java index 315eda4793a12..bc12555664575 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java @@ -136,14 +136,13 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { @Override protected InternalGeoGridBucket reduceBucket(List buckets, AggregationReduceContext context) { - assert buckets.size() > 0; - List aggregationsList = new ArrayList<>(buckets.size()); + assert buckets.isEmpty() == false; long docCount = 0; for (InternalGeoGridBucket bucket : buckets) { docCount += bucket.docCount; - aggregationsList.add(bucket.aggregations); } - final InternalAggregations aggs = InternalAggregations.reduce(aggregationsList, context); + final List aggregations = new BucketAggregationList<>(buckets); + final 
InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); return createBucket(buckets.get(0).hashAsLong, docCount, aggs); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index 8a7561aaab574..a6d3627ecda28 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -393,14 +393,13 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent buckets, AggregationReduceContext context) { - assert buckets.size() > 0; - List aggregations = new ArrayList<>(buckets.size()); + assert buckets.isEmpty() == false; long docCount = 0; for (Bucket bucket : buckets) { docCount += bucket.docCount; - aggregations.add(bucket.getAggregations()); } - InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); + final List aggregations = new BucketAggregationList<>(buckets); + final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); return createBucket(buckets.get(0).key, docCount, aggs); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index b6d5a705fe0cd..88777d5abde99 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -348,14 +348,13 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent buckets, AggregationReduceContext context) { - assert buckets.size() > 0; - List aggregations = new ArrayList<>(buckets.size()); + assert buckets.isEmpty() == false; long docCount = 0; for (Bucket bucket : buckets) { docCount += bucket.docCount; - aggregations.add(bucket.getAggregations()); } - InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); + final List aggregations = new BucketAggregationList<>(buckets); + final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); return createBucket(buckets.get(0).key, docCount, aggs); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java index 59bb251368c2e..073621575f292 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java @@ -307,7 +307,6 @@ public Number getKey(MultiBucketsAggregation.Bucket bucket) { @Override protected Bucket reduceBucket(List buckets, AggregationReduceContext context) { - List aggregations = new ArrayList<>(buckets.size()); long docCount = 0; double min = Double.POSITIVE_INFINITY; double max = Double.NEGATIVE_INFINITY; @@ -317,11 +316,11 @@ protected Bucket reduceBucket(List buckets, AggregationReduceContext con min = Math.min(min, bucket.bounds.min); max = Math.max(max, bucket.bounds.max); sum += bucket.docCount * bucket.centroid; - aggregations.add(bucket.getAggregations()); 
} - InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); - double centroid = sum / docCount; - Bucket.BucketBounds bounds = new Bucket.BucketBounds(min, max); + final List aggregations = new BucketAggregationList<>(buckets); + final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); + final double centroid = sum / docCount; + final Bucket.BucketBounds bounds = new Bucket.BucketBounds(min, max); return new Bucket(centroid, bounds, docCount, format, aggs); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java index f0104599396dd..33c3122e58967 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java @@ -333,14 +333,13 @@ private Bucket createBucket(Bucket prototype, InternalAggregations aggregations, @Override protected Bucket reduceBucket(List buckets, AggregationReduceContext context) { - assert buckets.size() > 0; - List aggregations = new ArrayList<>(buckets.size()); + assert buckets.isEmpty() == false; long docCount = 0; for (InternalIpPrefix.Bucket bucket : buckets) { docCount += bucket.docCount; - aggregations.add(bucket.getAggregations()); } - InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); + final List aggregations = new BucketAggregationList<>(buckets); + final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); return createBucket(buckets.get(0), aggs, docCount); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java index 131be36db2956..414af918e837d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java @@ -292,9 +292,9 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { @Override protected Bucket reduceBucket(List buckets, AggregationReduceContext context) { - assert buckets.size() > 0; - List aggregationsList = buckets.stream().map(bucket -> bucket.aggregations).toList(); - final InternalAggregations aggs = InternalAggregations.reduce(aggregationsList, context); + assert buckets.isEmpty() == false; + final List aggregations = new BucketAggregationList<>(buckets); + final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); return createBucket(aggs, buckets.get(0)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java index 046d5efb97ece..ec0ace8f3e011 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java @@ -369,14 +369,13 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { @Override protected B reduceBucket(List buckets, AggregationReduceContext context) { - assert buckets.size() > 0; + assert buckets.isEmpty() == false; long docCount = 0; - List aggregationsList = new 
ArrayList<>(buckets.size()); for (Bucket bucket : buckets) { docCount += bucket.docCount; - aggregationsList.add(bucket.aggregations); } - final InternalAggregations aggs = InternalAggregations.reduce(aggregationsList, context); + final List aggregations = new BucketAggregationList<>(buckets); + final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); Bucket prototype = buckets.get(0); return getFactory().createBucket(prototype.key, prototype.from, prototype.to, docCount, aggs, keyed, format); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java index ca3142a0c0797..ea3762503853e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java @@ -88,14 +88,13 @@ public abstract static class AbstractTermsBucket extends InternalMultiBucketAggr @Override public B reduceBucket(List buckets, AggregationReduceContext context) { - assert buckets.size() > 0; + assert buckets.isEmpty() == false; long docCount = 0; // For the per term doc count error we add up the errors from the // shards that did not respond with the term. To do this we add up // the errors from the shards that did respond with the terms and // subtract that from the sum of the error from all shards long docCountError = 0; - List aggregationsList = new ArrayList<>(buckets.size()); for (B bucket : buckets) { docCount += bucket.getDocCount(); if (docCountError != -1) { @@ -105,9 +104,9 @@ public B reduceBucket(List buckets, AggregationReduceContext context) { docCountError += bucket.getDocCountError(); } } - aggregationsList.add(bucket.getAggregations()); } - InternalAggregations aggs = InternalAggregations.reduce(aggregationsList, context); + final List aggregations = new BucketAggregationList<>(buckets); + final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); return createBucket(docCount, aggs, docCountError, buckets.get(0)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java index f3ce541b1b8b9..b5aa8e3973c3e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java @@ -21,7 +21,6 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; @@ -149,14 +148,13 @@ public InternalAggregation reduce(List aggregations, Aggreg @Override protected B reduceBucket(List buckets, AggregationReduceContext context) { - assert buckets.size() > 0; + assert buckets.isEmpty() == false; long docCount = 0; - List aggregationsList = new ArrayList<>(buckets.size()); for (B bucket : buckets) { docCount += bucket.docCount; - aggregationsList.add(bucket.aggregations); } - InternalAggregations aggs = InternalAggregations.reduce(aggregationsList, context); + final List aggregations = new BucketAggregationList<>(buckets); + final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); return createBucket(docCount, aggs, buckets.get(0)); } diff 
--git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index d627be186f8ff..be96683b98915 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -276,16 +276,15 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { @Override protected B reduceBucket(List buckets, AggregationReduceContext context) { - assert buckets.size() > 0; + assert buckets.isEmpty() == false; long subsetDf = 0; long supersetDf = 0; - List aggregationsList = new ArrayList<>(buckets.size()); for (B bucket : buckets) { subsetDf += bucket.subsetDf; supersetDf += bucket.supersetDf; - aggregationsList.add(bucket.aggregations); } - InternalAggregations aggs = InternalAggregations.reduce(aggregationsList, context); + final List aggregations = new BucketAggregationList<>(buckets); + final InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); return createBucket(subsetDf, buckets.get(0).subsetSize, supersetDf, buckets.get(0).supersetSize, aggs, buckets.get(0)); } From 669934fc0d33429520d5b4c8c5d23a3433328e18 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Tue, 6 Feb 2024 18:17:11 +0100 Subject: [PATCH 073/106] ES|QL: remove PROJECT keyword from the grammar (#105064) --- docs/changelog/105064.yaml | 17 + .../esql/src/main/antlr/EsqlBaseLexer.g4 | 3 +- .../esql/src/main/antlr/EsqlBaseLexer.tokens | 286 ++-- .../esql/src/main/antlr/EsqlBaseParser.g4 | 1 - .../esql/src/main/antlr/EsqlBaseParser.tokens | 286 ++-- .../xpack/esql/parser/EsqlBaseLexer.interp | 5 +- .../xpack/esql/parser/EsqlBaseLexer.java | 1486 ++++++++--------- .../xpack/esql/parser/EsqlBaseParser.interp | 4 +- .../xpack/esql/parser/EsqlBaseParser.java | 865 +++++----- .../xpack/esql/parser/LogicalPlanBuilder.java | 4 - .../esql/parser/StatementParserTests.java | 5 +- 11 files changed, 1457 insertions(+), 1505 deletions(-) create mode 100644 docs/changelog/105064.yaml diff --git a/docs/changelog/105064.yaml b/docs/changelog/105064.yaml new file mode 100644 index 0000000000000..81c62b3148f1c --- /dev/null +++ b/docs/changelog/105064.yaml @@ -0,0 +1,17 @@ +pr: 105064 +summary: "ES|QL: remove PROJECT keyword from the grammar" +area: ES|QL +type: breaking +issues: [] +breaking: + title: "ES|QL: remove PROJECT keyword from the grammar" + area: REST API + details: "Removes the PROJECT keyword (an alias for KEEP) from ES|QL grammar" + impact: "Before this change, users could use PROJECT as an alias for KEEP in ESQL queries,\ + \ (eg. 'FROM idx | PROJECT name, surname')\ + \ the parser replaced PROJECT with KEEP, emitted a warning:\ + \ 'PROJECT command is no longer supported, please use KEEP instead'\ + \ and the query was executed normally.\ + \ With this change, PROJECT command is no longer recognized by the query parser;\ + \ queries using PROJECT command now return a parsing exception." 
+ notable: false diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 81f12997248c8..16cc2656b9d71 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -11,7 +11,6 @@ INLINESTATS : 'inlinestats' -> pushMode(EXPRESSION_MODE); KEEP : 'keep' -> pushMode(PROJECT_MODE); LIMIT : 'limit' -> pushMode(EXPRESSION_MODE); MV_EXPAND : 'mv_expand' -> pushMode(MVEXPAND_MODE); -PROJECT : 'project' -> pushMode(PROJECT_MODE); RENAME : 'rename' -> pushMode(RENAME_MODE); ROW : 'row' -> pushMode(EXPRESSION_MODE); SHOW : 'show' -> pushMode(SHOW_MODE); @@ -210,7 +209,7 @@ FROM_WS : WS -> channel(HIDDEN) ; // -// DROP, KEEP, PROJECT +// DROP, KEEP // mode PROJECT_MODE; PROJECT_PIPE : PIPE -> type(PIPE), popMode; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index be2d95ba9531f..1e3bfb20a2c8b 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -9,100 +9,99 @@ INLINESTATS=8 KEEP=9 LIMIT=10 MV_EXPAND=11 -PROJECT=12 -RENAME=13 -ROW=14 -SHOW=15 -SORT=16 -STATS=17 -WHERE=18 -UNKNOWN_CMD=19 -LINE_COMMENT=20 -MULTILINE_COMMENT=21 -WS=22 -EXPLAIN_WS=23 -EXPLAIN_LINE_COMMENT=24 -EXPLAIN_MULTILINE_COMMENT=25 -PIPE=26 -STRING=27 -INTEGER_LITERAL=28 -DECIMAL_LITERAL=29 -BY=30 -AND=31 -ASC=32 -ASSIGN=33 -COMMA=34 -DESC=35 -DOT=36 -FALSE=37 -FIRST=38 -LAST=39 -LP=40 -IN=41 -IS=42 -LIKE=43 -NOT=44 -NULL=45 -NULLS=46 -OR=47 -PARAM=48 -RLIKE=49 -RP=50 -TRUE=51 -EQ=52 -CIEQ=53 -NEQ=54 -LT=55 -LTE=56 -GT=57 -GTE=58 -PLUS=59 -MINUS=60 -ASTERISK=61 -SLASH=62 -PERCENT=63 -OPENING_BRACKET=64 -CLOSING_BRACKET=65 -UNQUOTED_IDENTIFIER=66 -QUOTED_IDENTIFIER=67 -EXPR_LINE_COMMENT=68 -EXPR_MULTILINE_COMMENT=69 -EXPR_WS=70 -METADATA=71 -FROM_UNQUOTED_IDENTIFIER=72 -FROM_LINE_COMMENT=73 -FROM_MULTILINE_COMMENT=74 -FROM_WS=75 -UNQUOTED_ID_PATTERN=76 -PROJECT_LINE_COMMENT=77 -PROJECT_MULTILINE_COMMENT=78 -PROJECT_WS=79 -AS=80 -RENAME_LINE_COMMENT=81 -RENAME_MULTILINE_COMMENT=82 -RENAME_WS=83 -ON=84 -WITH=85 -ENRICH_POLICY_NAME=86 -ENRICH_LINE_COMMENT=87 -ENRICH_MULTILINE_COMMENT=88 -ENRICH_WS=89 -ENRICH_FIELD_LINE_COMMENT=90 -ENRICH_FIELD_MULTILINE_COMMENT=91 -ENRICH_FIELD_WS=92 -MVEXPAND_LINE_COMMENT=93 -MVEXPAND_MULTILINE_COMMENT=94 -MVEXPAND_WS=95 -INFO=96 -FUNCTIONS=97 -SHOW_LINE_COMMENT=98 -SHOW_MULTILINE_COMMENT=99 -SHOW_WS=100 -COLON=101 -SETTING=102 -SETTING_LINE_COMMENT=103 -SETTTING_MULTILINE_COMMENT=104 -SETTING_WS=105 +RENAME=12 +ROW=13 +SHOW=14 +SORT=15 +STATS=16 +WHERE=17 +UNKNOWN_CMD=18 +LINE_COMMENT=19 +MULTILINE_COMMENT=20 +WS=21 +EXPLAIN_WS=22 +EXPLAIN_LINE_COMMENT=23 +EXPLAIN_MULTILINE_COMMENT=24 +PIPE=25 +STRING=26 +INTEGER_LITERAL=27 +DECIMAL_LITERAL=28 +BY=29 +AND=30 +ASC=31 +ASSIGN=32 +COMMA=33 +DESC=34 +DOT=35 +FALSE=36 +FIRST=37 +LAST=38 +LP=39 +IN=40 +IS=41 +LIKE=42 +NOT=43 +NULL=44 +NULLS=45 +OR=46 +PARAM=47 +RLIKE=48 +RP=49 +TRUE=50 +EQ=51 +CIEQ=52 +NEQ=53 +LT=54 +LTE=55 +GT=56 +GTE=57 +PLUS=58 +MINUS=59 +ASTERISK=60 +SLASH=61 +PERCENT=62 +OPENING_BRACKET=63 +CLOSING_BRACKET=64 +UNQUOTED_IDENTIFIER=65 +QUOTED_IDENTIFIER=66 +EXPR_LINE_COMMENT=67 +EXPR_MULTILINE_COMMENT=68 +EXPR_WS=69 +METADATA=70 +FROM_UNQUOTED_IDENTIFIER=71 +FROM_LINE_COMMENT=72 +FROM_MULTILINE_COMMENT=73 +FROM_WS=74 +UNQUOTED_ID_PATTERN=75 +PROJECT_LINE_COMMENT=76 +PROJECT_MULTILINE_COMMENT=77 +PROJECT_WS=78 +AS=79 +RENAME_LINE_COMMENT=80 
+RENAME_MULTILINE_COMMENT=81 +RENAME_WS=82 +ON=83 +WITH=84 +ENRICH_POLICY_NAME=85 +ENRICH_LINE_COMMENT=86 +ENRICH_MULTILINE_COMMENT=87 +ENRICH_WS=88 +ENRICH_FIELD_LINE_COMMENT=89 +ENRICH_FIELD_MULTILINE_COMMENT=90 +ENRICH_FIELD_WS=91 +MVEXPAND_LINE_COMMENT=92 +MVEXPAND_MULTILINE_COMMENT=93 +MVEXPAND_WS=94 +INFO=95 +FUNCTIONS=96 +SHOW_LINE_COMMENT=97 +SHOW_MULTILINE_COMMENT=98 +SHOW_WS=99 +COLON=100 +SETTING=101 +SETTING_LINE_COMMENT=102 +SETTTING_MULTILINE_COMMENT=103 +SETTING_WS=104 'dissect'=1 'drop'=2 'enrich'=3 @@ -114,53 +113,52 @@ SETTING_WS=105 'keep'=9 'limit'=10 'mv_expand'=11 -'project'=12 -'rename'=13 -'row'=14 -'show'=15 -'sort'=16 -'stats'=17 -'where'=18 -'|'=26 -'by'=30 -'and'=31 -'asc'=32 -'='=33 -','=34 -'desc'=35 -'.'=36 -'false'=37 -'first'=38 -'last'=39 -'('=40 -'in'=41 -'is'=42 -'like'=43 -'not'=44 -'null'=45 -'nulls'=46 -'or'=47 -'?'=48 -'rlike'=49 -')'=50 -'true'=51 -'=='=52 -'=~'=53 -'!='=54 -'<'=55 -'<='=56 -'>'=57 -'>='=58 -'+'=59 -'-'=60 -'*'=61 -'/'=62 -'%'=63 -']'=65 -'metadata'=71 -'as'=80 -'on'=84 -'with'=85 -'info'=96 -'functions'=97 -':'=101 +'rename'=12 +'row'=13 +'show'=14 +'sort'=15 +'stats'=16 +'where'=17 +'|'=25 +'by'=29 +'and'=30 +'asc'=31 +'='=32 +','=33 +'desc'=34 +'.'=35 +'false'=36 +'first'=37 +'last'=38 +'('=39 +'in'=40 +'is'=41 +'like'=42 +'not'=43 +'null'=44 +'nulls'=45 +'or'=46 +'?'=47 +'rlike'=48 +')'=49 +'true'=50 +'=='=51 +'=~'=52 +'!='=53 +'<'=54 +'<='=55 +'>'=56 +'>='=57 +'+'=58 +'-'=59 +'*'=60 +'/'=61 +'%'=62 +']'=64 +'metadata'=70 +'as'=79 +'on'=83 +'with'=84 +'info'=95 +'functions'=96 +':'=100 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index a9539bff1b765..34b009c3900e8 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -168,7 +168,6 @@ orderExpression keepCommand : KEEP qualifiedNamePattern (COMMA qualifiedNamePattern)* - | PROJECT qualifiedNamePattern (COMMA qualifiedNamePattern)* ; dropCommand diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index be2d95ba9531f..1e3bfb20a2c8b 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -9,100 +9,99 @@ INLINESTATS=8 KEEP=9 LIMIT=10 MV_EXPAND=11 -PROJECT=12 -RENAME=13 -ROW=14 -SHOW=15 -SORT=16 -STATS=17 -WHERE=18 -UNKNOWN_CMD=19 -LINE_COMMENT=20 -MULTILINE_COMMENT=21 -WS=22 -EXPLAIN_WS=23 -EXPLAIN_LINE_COMMENT=24 -EXPLAIN_MULTILINE_COMMENT=25 -PIPE=26 -STRING=27 -INTEGER_LITERAL=28 -DECIMAL_LITERAL=29 -BY=30 -AND=31 -ASC=32 -ASSIGN=33 -COMMA=34 -DESC=35 -DOT=36 -FALSE=37 -FIRST=38 -LAST=39 -LP=40 -IN=41 -IS=42 -LIKE=43 -NOT=44 -NULL=45 -NULLS=46 -OR=47 -PARAM=48 -RLIKE=49 -RP=50 -TRUE=51 -EQ=52 -CIEQ=53 -NEQ=54 -LT=55 -LTE=56 -GT=57 -GTE=58 -PLUS=59 -MINUS=60 -ASTERISK=61 -SLASH=62 -PERCENT=63 -OPENING_BRACKET=64 -CLOSING_BRACKET=65 -UNQUOTED_IDENTIFIER=66 -QUOTED_IDENTIFIER=67 -EXPR_LINE_COMMENT=68 -EXPR_MULTILINE_COMMENT=69 -EXPR_WS=70 -METADATA=71 -FROM_UNQUOTED_IDENTIFIER=72 -FROM_LINE_COMMENT=73 -FROM_MULTILINE_COMMENT=74 -FROM_WS=75 -UNQUOTED_ID_PATTERN=76 -PROJECT_LINE_COMMENT=77 -PROJECT_MULTILINE_COMMENT=78 -PROJECT_WS=79 -AS=80 -RENAME_LINE_COMMENT=81 -RENAME_MULTILINE_COMMENT=82 -RENAME_WS=83 -ON=84 -WITH=85 -ENRICH_POLICY_NAME=86 -ENRICH_LINE_COMMENT=87 -ENRICH_MULTILINE_COMMENT=88 -ENRICH_WS=89 -ENRICH_FIELD_LINE_COMMENT=90 -ENRICH_FIELD_MULTILINE_COMMENT=91 
-ENRICH_FIELD_WS=92 -MVEXPAND_LINE_COMMENT=93 -MVEXPAND_MULTILINE_COMMENT=94 -MVEXPAND_WS=95 -INFO=96 -FUNCTIONS=97 -SHOW_LINE_COMMENT=98 -SHOW_MULTILINE_COMMENT=99 -SHOW_WS=100 -COLON=101 -SETTING=102 -SETTING_LINE_COMMENT=103 -SETTTING_MULTILINE_COMMENT=104 -SETTING_WS=105 +RENAME=12 +ROW=13 +SHOW=14 +SORT=15 +STATS=16 +WHERE=17 +UNKNOWN_CMD=18 +LINE_COMMENT=19 +MULTILINE_COMMENT=20 +WS=21 +EXPLAIN_WS=22 +EXPLAIN_LINE_COMMENT=23 +EXPLAIN_MULTILINE_COMMENT=24 +PIPE=25 +STRING=26 +INTEGER_LITERAL=27 +DECIMAL_LITERAL=28 +BY=29 +AND=30 +ASC=31 +ASSIGN=32 +COMMA=33 +DESC=34 +DOT=35 +FALSE=36 +FIRST=37 +LAST=38 +LP=39 +IN=40 +IS=41 +LIKE=42 +NOT=43 +NULL=44 +NULLS=45 +OR=46 +PARAM=47 +RLIKE=48 +RP=49 +TRUE=50 +EQ=51 +CIEQ=52 +NEQ=53 +LT=54 +LTE=55 +GT=56 +GTE=57 +PLUS=58 +MINUS=59 +ASTERISK=60 +SLASH=61 +PERCENT=62 +OPENING_BRACKET=63 +CLOSING_BRACKET=64 +UNQUOTED_IDENTIFIER=65 +QUOTED_IDENTIFIER=66 +EXPR_LINE_COMMENT=67 +EXPR_MULTILINE_COMMENT=68 +EXPR_WS=69 +METADATA=70 +FROM_UNQUOTED_IDENTIFIER=71 +FROM_LINE_COMMENT=72 +FROM_MULTILINE_COMMENT=73 +FROM_WS=74 +UNQUOTED_ID_PATTERN=75 +PROJECT_LINE_COMMENT=76 +PROJECT_MULTILINE_COMMENT=77 +PROJECT_WS=78 +AS=79 +RENAME_LINE_COMMENT=80 +RENAME_MULTILINE_COMMENT=81 +RENAME_WS=82 +ON=83 +WITH=84 +ENRICH_POLICY_NAME=85 +ENRICH_LINE_COMMENT=86 +ENRICH_MULTILINE_COMMENT=87 +ENRICH_WS=88 +ENRICH_FIELD_LINE_COMMENT=89 +ENRICH_FIELD_MULTILINE_COMMENT=90 +ENRICH_FIELD_WS=91 +MVEXPAND_LINE_COMMENT=92 +MVEXPAND_MULTILINE_COMMENT=93 +MVEXPAND_WS=94 +INFO=95 +FUNCTIONS=96 +SHOW_LINE_COMMENT=97 +SHOW_MULTILINE_COMMENT=98 +SHOW_WS=99 +COLON=100 +SETTING=101 +SETTING_LINE_COMMENT=102 +SETTTING_MULTILINE_COMMENT=103 +SETTING_WS=104 'dissect'=1 'drop'=2 'enrich'=3 @@ -114,53 +113,52 @@ SETTING_WS=105 'keep'=9 'limit'=10 'mv_expand'=11 -'project'=12 -'rename'=13 -'row'=14 -'show'=15 -'sort'=16 -'stats'=17 -'where'=18 -'|'=26 -'by'=30 -'and'=31 -'asc'=32 -'='=33 -','=34 -'desc'=35 -'.'=36 -'false'=37 -'first'=38 -'last'=39 -'('=40 -'in'=41 -'is'=42 -'like'=43 -'not'=44 -'null'=45 -'nulls'=46 -'or'=47 -'?'=48 -'rlike'=49 -')'=50 -'true'=51 -'=='=52 -'=~'=53 -'!='=54 -'<'=55 -'<='=56 -'>'=57 -'>='=58 -'+'=59 -'-'=60 -'*'=61 -'/'=62 -'%'=63 -']'=65 -'metadata'=71 -'as'=80 -'on'=84 -'with'=85 -'info'=96 -'functions'=97 -':'=101 +'rename'=12 +'row'=13 +'show'=14 +'sort'=15 +'stats'=16 +'where'=17 +'|'=25 +'by'=29 +'and'=30 +'asc'=31 +'='=32 +','=33 +'desc'=34 +'.'=35 +'false'=36 +'first'=37 +'last'=38 +'('=39 +'in'=40 +'is'=41 +'like'=42 +'not'=43 +'null'=44 +'nulls'=45 +'or'=46 +'?'=47 +'rlike'=48 +')'=49 +'true'=50 +'=='=51 +'=~'=52 +'!='=53 +'<'=54 +'<='=55 +'>'=56 +'>='=57 +'+'=58 +'-'=59 +'*'=60 +'/'=61 +'%'=62 +']'=64 +'metadata'=70 +'as'=79 +'on'=83 +'with'=84 +'info'=95 +'functions'=96 +':'=100 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 17aeb195154d6..a20a5df4c0fc8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -11,7 +11,6 @@ null 'keep' 'limit' 'mv_expand' -'project' 'rename' 'row' 'show' @@ -119,7 +118,6 @@ INLINESTATS KEEP LIMIT MV_EXPAND -PROJECT RENAME ROW SHOW @@ -226,7 +224,6 @@ INLINESTATS KEEP LIMIT MV_EXPAND -PROJECT RENAME ROW SHOW @@ -388,4 +385,4 @@ SHOW_MODE SETTING_MODE atn: -[4, 0, 105, 1161, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 
... [The remainder of this hunk is the serialized ATN array in EsqlBaseLexer.interp: several thousand machine-generated integers that ANTLR regenerates wholesale whenever the grammar changes (here, after removing the PROJECT token). The dump carries no hand-reviewable content and is omitted.]
654, 1, 0, 0, 0, 654, 655, 3, 75, 32, 0, 655, 657, 1, 0, 0, 0, 656, 615, 1, 0, 0, 0, 656, 626, 1, 0, 0, 0, 656, 633, 1, 0, 0, 0, 656, 648, 1, 0, 0, 0, 657, 92, 1, 0, 0, 0, 658, 659, 5, 98, 0, 0, 659, 660, 5, 121, 0, 0, 660, 94, 1, 0, 0, 0, 661, 662, 5, 97, 0, 0, 662, 663, 5, 110, 0, 0, 663, 664, 5, 100, 0, 0, 664, 96, 1, 0, 0, 0, 665, 666, 5, 97, 0, 0, 666, 667, 5, 115, 0, 0, 667, 668, 5, 99, 0, 0, 668, 98, 1, 0, 0, 0, 669, 670, 5, 61, 0, 0, 670, 100, 1, 0, 0, 0, 671, 672, 5, 44, 0, 0, 672, 102, 1, 0, 0, 0, 673, 674, 5, 100, 0, 0, 674, 675, 5, 101, 0, 0, 675, 676, 5, 115, 0, 0, 676, 677, 5, 99, 0, 0, 677, 104, 1, 0, 0, 0, 678, 679, 5, 46, 0, 0, 679, 106, 1, 0, 0, 0, 680, 681, 5, 102, 0, 0, 681, 682, 5, 97, 0, 0, 682, 683, 5, 108, 0, 0, 683, 684, 5, 115, 0, 0, 684, 685, 5, 101, 0, 0, 685, 108, 1, 0, 0, 0, 686, 687, 5, 102, 0, 0, 687, 688, 5, 105, 0, 0, 688, 689, 5, 114, 0, 0, 689, 690, 5, 115, 0, 0, 690, 691, 5, 116, 0, 0, 691, 110, 1, 0, 0, 0, 692, 693, 5, 108, 0, 0, 693, 694, 5, 97, 0, 0, 694, 695, 5, 115, 0, 0, 695, 696, 5, 116, 0, 0, 696, 112, 1, 0, 0, 0, 697, 698, 5, 40, 0, 0, 698, 114, 1, 0, 0, 0, 699, 700, 5, 105, 0, 0, 700, 701, 5, 110, 0, 0, 701, 116, 1, 0, 0, 0, 702, 703, 5, 105, 0, 0, 703, 704, 5, 115, 0, 0, 704, 118, 1, 0, 0, 0, 705, 706, 5, 108, 0, 0, 706, 707, 5, 105, 0, 0, 707, 708, 5, 107, 0, 0, 708, 709, 5, 101, 0, 0, 709, 120, 1, 0, 0, 0, 710, 711, 5, 110, 0, 0, 711, 712, 5, 111, 0, 0, 712, 713, 5, 116, 0, 0, 713, 122, 1, 0, 0, 0, 714, 715, 5, 110, 0, 0, 715, 716, 5, 117, 0, 0, 716, 717, 5, 108, 0, 0, 717, 718, 5, 108, 0, 0, 718, 124, 1, 0, 0, 0, 719, 720, 5, 110, 0, 0, 720, 721, 5, 117, 0, 0, 721, 722, 5, 108, 0, 0, 722, 723, 5, 108, 0, 0, 723, 724, 5, 115, 0, 0, 724, 126, 1, 0, 0, 0, 725, 726, 5, 111, 0, 0, 726, 727, 5, 114, 0, 0, 727, 128, 1, 0, 0, 0, 728, 729, 5, 63, 0, 0, 729, 130, 1, 0, 0, 0, 730, 731, 5, 114, 0, 0, 731, 732, 5, 108, 0, 0, 732, 733, 5, 105, 0, 0, 733, 734, 5, 107, 0, 0, 734, 735, 5, 101, 0, 0, 735, 132, 1, 0, 0, 0, 736, 737, 5, 41, 0, 0, 737, 134, 1, 0, 0, 0, 738, 739, 5, 116, 0, 0, 739, 740, 5, 114, 0, 0, 740, 741, 5, 117, 0, 0, 741, 742, 5, 101, 0, 0, 742, 136, 1, 0, 0, 0, 743, 744, 5, 61, 0, 0, 744, 745, 5, 61, 0, 0, 745, 138, 1, 0, 0, 0, 746, 747, 5, 61, 0, 0, 747, 748, 5, 126, 0, 0, 748, 140, 1, 0, 0, 0, 749, 750, 5, 33, 0, 0, 750, 751, 5, 61, 0, 0, 751, 142, 1, 0, 0, 0, 752, 753, 5, 60, 0, 0, 753, 144, 1, 0, 0, 0, 754, 755, 5, 60, 0, 0, 755, 756, 5, 61, 0, 0, 756, 146, 1, 0, 0, 0, 757, 758, 5, 62, 0, 0, 758, 148, 1, 0, 0, 0, 759, 760, 5, 62, 0, 0, 760, 761, 5, 61, 0, 0, 761, 150, 1, 0, 0, 0, 762, 763, 5, 43, 0, 0, 763, 152, 1, 0, 0, 0, 764, 765, 5, 45, 0, 0, 765, 154, 1, 0, 0, 0, 766, 767, 5, 42, 0, 0, 767, 156, 1, 0, 0, 0, 768, 769, 5, 47, 0, 0, 769, 158, 1, 0, 0, 0, 770, 771, 5, 37, 0, 0, 771, 160, 1, 0, 0, 0, 772, 773, 5, 91, 0, 0, 773, 774, 1, 0, 0, 0, 774, 775, 6, 75, 0, 0, 775, 776, 6, 75, 0, 0, 776, 162, 1, 0, 0, 0, 777, 778, 5, 93, 0, 0, 778, 779, 1, 0, 0, 0, 779, 780, 6, 76, 12, 0, 780, 781, 6, 76, 12, 0, 781, 164, 1, 0, 0, 0, 782, 786, 3, 69, 29, 0, 783, 785, 3, 85, 37, 0, 784, 783, 1, 0, 0, 0, 785, 788, 1, 0, 0, 0, 786, 784, 1, 0, 0, 0, 786, 787, 1, 0, 0, 0, 787, 799, 1, 0, 0, 0, 788, 786, 1, 0, 0, 0, 789, 792, 3, 83, 36, 0, 790, 792, 3, 77, 33, 0, 791, 789, 1, 0, 0, 0, 791, 790, 1, 0, 0, 0, 792, 794, 1, 0, 0, 0, 793, 795, 3, 85, 37, 0, 794, 793, 1, 0, 0, 0, 795, 796, 1, 0, 0, 0, 796, 794, 1, 0, 0, 0, 796, 797, 1, 0, 0, 0, 797, 799, 1, 0, 0, 0, 798, 782, 1, 0, 0, 0, 798, 791, 1, 0, 0, 0, 799, 166, 1, 0, 0, 0, 800, 802, 3, 79, 34, 0, 
801, 803, 3, 81, 35, 0, 802, 801, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 802, 1, 0, 0, 0, 804, 805, 1, 0, 0, 0, 805, 806, 1, 0, 0, 0, 806, 807, 3, 79, 34, 0, 807, 168, 1, 0, 0, 0, 808, 809, 3, 49, 19, 0, 809, 810, 1, 0, 0, 0, 810, 811, 6, 79, 8, 0, 811, 170, 1, 0, 0, 0, 812, 813, 3, 51, 20, 0, 813, 814, 1, 0, 0, 0, 814, 815, 6, 80, 8, 0, 815, 172, 1, 0, 0, 0, 816, 817, 3, 53, 21, 0, 817, 818, 1, 0, 0, 0, 818, 819, 6, 81, 8, 0, 819, 174, 1, 0, 0, 0, 820, 821, 3, 65, 27, 0, 821, 822, 1, 0, 0, 0, 822, 823, 6, 82, 11, 0, 823, 824, 6, 82, 12, 0, 824, 176, 1, 0, 0, 0, 825, 826, 3, 161, 75, 0, 826, 827, 1, 0, 0, 0, 827, 828, 6, 83, 9, 0, 828, 178, 1, 0, 0, 0, 829, 830, 3, 163, 76, 0, 830, 831, 1, 0, 0, 0, 831, 832, 6, 84, 13, 0, 832, 180, 1, 0, 0, 0, 833, 834, 3, 101, 45, 0, 834, 835, 1, 0, 0, 0, 835, 836, 6, 85, 14, 0, 836, 182, 1, 0, 0, 0, 837, 838, 3, 99, 44, 0, 838, 839, 1, 0, 0, 0, 839, 840, 6, 86, 15, 0, 840, 184, 1, 0, 0, 0, 841, 842, 5, 109, 0, 0, 842, 843, 5, 101, 0, 0, 843, 844, 5, 116, 0, 0, 844, 845, 5, 97, 0, 0, 845, 846, 5, 100, 0, 0, 846, 847, 5, 97, 0, 0, 847, 848, 5, 116, 0, 0, 848, 849, 5, 97, 0, 0, 849, 186, 1, 0, 0, 0, 850, 854, 8, 10, 0, 0, 851, 852, 5, 47, 0, 0, 852, 854, 8, 11, 0, 0, 853, 850, 1, 0, 0, 0, 853, 851, 1, 0, 0, 0, 854, 188, 1, 0, 0, 0, 855, 857, 3, 187, 88, 0, 856, 855, 1, 0, 0, 0, 857, 858, 1, 0, 0, 0, 858, 856, 1, 0, 0, 0, 858, 859, 1, 0, 0, 0, 859, 190, 1, 0, 0, 0, 860, 861, 3, 167, 78, 0, 861, 862, 1, 0, 0, 0, 862, 863, 6, 90, 16, 0, 863, 192, 1, 0, 0, 0, 864, 865, 3, 49, 19, 0, 865, 866, 1, 0, 0, 0, 866, 867, 6, 91, 8, 0, 867, 194, 1, 0, 0, 0, 868, 869, 3, 51, 20, 0, 869, 870, 1, 0, 0, 0, 870, 871, 6, 92, 8, 0, 871, 196, 1, 0, 0, 0, 872, 873, 3, 53, 21, 0, 873, 874, 1, 0, 0, 0, 874, 875, 6, 93, 8, 0, 875, 198, 1, 0, 0, 0, 876, 877, 3, 65, 27, 0, 877, 878, 1, 0, 0, 0, 878, 879, 6, 94, 11, 0, 879, 880, 6, 94, 12, 0, 880, 200, 1, 0, 0, 0, 881, 882, 3, 105, 47, 0, 882, 883, 1, 0, 0, 0, 883, 884, 6, 95, 17, 0, 884, 202, 1, 0, 0, 0, 885, 886, 3, 101, 45, 0, 886, 887, 1, 0, 0, 0, 887, 888, 6, 96, 14, 0, 888, 204, 1, 0, 0, 0, 889, 894, 3, 69, 29, 0, 890, 894, 3, 67, 28, 0, 891, 894, 3, 83, 36, 0, 892, 894, 3, 155, 72, 0, 893, 889, 1, 0, 0, 0, 893, 890, 1, 0, 0, 0, 893, 891, 1, 0, 0, 0, 893, 892, 1, 0, 0, 0, 894, 206, 1, 0, 0, 0, 895, 898, 3, 69, 29, 0, 896, 898, 3, 155, 72, 0, 897, 895, 1, 0, 0, 0, 897, 896, 1, 0, 0, 0, 898, 902, 1, 0, 0, 0, 899, 901, 3, 205, 97, 0, 900, 899, 1, 0, 0, 0, 901, 904, 1, 0, 0, 0, 902, 900, 1, 0, 0, 0, 902, 903, 1, 0, 0, 0, 903, 915, 1, 0, 0, 0, 904, 902, 1, 0, 0, 0, 905, 908, 3, 83, 36, 0, 906, 908, 3, 77, 33, 0, 907, 905, 1, 0, 0, 0, 907, 906, 1, 0, 0, 0, 908, 910, 1, 0, 0, 0, 909, 911, 3, 205, 97, 0, 910, 909, 1, 0, 0, 0, 911, 912, 1, 0, 0, 0, 912, 910, 1, 0, 0, 0, 912, 913, 1, 0, 0, 0, 913, 915, 1, 0, 0, 0, 914, 897, 1, 0, 0, 0, 914, 907, 1, 0, 0, 0, 915, 208, 1, 0, 0, 0, 916, 917, 3, 207, 98, 0, 917, 918, 1, 0, 0, 0, 918, 919, 6, 99, 18, 0, 919, 210, 1, 0, 0, 0, 920, 921, 3, 167, 78, 0, 921, 922, 1, 0, 0, 0, 922, 923, 6, 100, 16, 0, 923, 212, 1, 0, 0, 0, 924, 925, 3, 49, 19, 0, 925, 926, 1, 0, 0, 0, 926, 927, 6, 101, 8, 0, 927, 214, 1, 0, 0, 0, 928, 929, 3, 51, 20, 0, 929, 930, 1, 0, 0, 0, 930, 931, 6, 102, 8, 0, 931, 216, 1, 0, 0, 0, 932, 933, 3, 53, 21, 0, 933, 934, 1, 0, 0, 0, 934, 935, 6, 103, 8, 0, 935, 218, 1, 0, 0, 0, 936, 937, 3, 65, 27, 0, 937, 938, 1, 0, 0, 0, 938, 939, 6, 104, 11, 0, 939, 940, 6, 104, 12, 0, 940, 220, 1, 0, 0, 0, 941, 942, 3, 99, 44, 0, 942, 943, 1, 0, 0, 0, 943, 944, 6, 105, 15, 0, 944, 222, 1, 0, 0, 
0, 945, 946, 3, 101, 45, 0, 946, 947, 1, 0, 0, 0, 947, 948, 6, 106, 14, 0, 948, 224, 1, 0, 0, 0, 949, 950, 3, 105, 47, 0, 950, 951, 1, 0, 0, 0, 951, 952, 6, 107, 17, 0, 952, 226, 1, 0, 0, 0, 953, 954, 5, 97, 0, 0, 954, 955, 5, 115, 0, 0, 955, 228, 1, 0, 0, 0, 956, 957, 3, 167, 78, 0, 957, 958, 1, 0, 0, 0, 958, 959, 6, 109, 16, 0, 959, 230, 1, 0, 0, 0, 960, 961, 3, 207, 98, 0, 961, 962, 1, 0, 0, 0, 962, 963, 6, 110, 18, 0, 963, 232, 1, 0, 0, 0, 964, 965, 3, 49, 19, 0, 965, 966, 1, 0, 0, 0, 966, 967, 6, 111, 8, 0, 967, 234, 1, 0, 0, 0, 968, 969, 3, 51, 20, 0, 969, 970, 1, 0, 0, 0, 970, 971, 6, 112, 8, 0, 971, 236, 1, 0, 0, 0, 972, 973, 3, 53, 21, 0, 973, 974, 1, 0, 0, 0, 974, 975, 6, 113, 8, 0, 975, 238, 1, 0, 0, 0, 976, 977, 3, 65, 27, 0, 977, 978, 1, 0, 0, 0, 978, 979, 6, 114, 11, 0, 979, 980, 6, 114, 12, 0, 980, 240, 1, 0, 0, 0, 981, 982, 3, 161, 75, 0, 982, 983, 1, 0, 0, 0, 983, 984, 6, 115, 9, 0, 984, 985, 6, 115, 19, 0, 985, 242, 1, 0, 0, 0, 986, 987, 5, 111, 0, 0, 987, 988, 5, 110, 0, 0, 988, 989, 1, 0, 0, 0, 989, 990, 6, 116, 20, 0, 990, 244, 1, 0, 0, 0, 991, 992, 5, 119, 0, 0, 992, 993, 5, 105, 0, 0, 993, 994, 5, 116, 0, 0, 994, 995, 5, 104, 0, 0, 995, 996, 1, 0, 0, 0, 996, 997, 6, 117, 20, 0, 997, 246, 1, 0, 0, 0, 998, 999, 8, 12, 0, 0, 999, 248, 1, 0, 0, 0, 1000, 1003, 3, 69, 29, 0, 1001, 1003, 3, 67, 28, 0, 1002, 1000, 1, 0, 0, 0, 1002, 1001, 1, 0, 0, 0, 1003, 1007, 1, 0, 0, 0, 1004, 1006, 3, 247, 118, 0, 1005, 1004, 1, 0, 0, 0, 1006, 1009, 1, 0, 0, 0, 1007, 1005, 1, 0, 0, 0, 1007, 1008, 1, 0, 0, 0, 1008, 250, 1, 0, 0, 0, 1009, 1007, 1, 0, 0, 0, 1010, 1011, 3, 167, 78, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 120, 16, 0, 1013, 252, 1, 0, 0, 0, 1014, 1015, 3, 249, 119, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 121, 21, 0, 1017, 254, 1, 0, 0, 0, 1018, 1019, 3, 49, 19, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 122, 8, 0, 1021, 256, 1, 0, 0, 0, 1022, 1023, 3, 51, 20, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 123, 8, 0, 1025, 258, 1, 0, 0, 0, 1026, 1027, 3, 53, 21, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 124, 8, 0, 1029, 260, 1, 0, 0, 0, 1030, 1031, 3, 65, 27, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 125, 11, 0, 1033, 1034, 6, 125, 12, 0, 1034, 1035, 6, 125, 12, 0, 1035, 262, 1, 0, 0, 0, 1036, 1037, 3, 99, 44, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1039, 6, 126, 15, 0, 1039, 264, 1, 0, 0, 0, 1040, 1041, 3, 101, 45, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 6, 127, 14, 0, 1043, 266, 1, 0, 0, 0, 1044, 1045, 3, 105, 47, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 128, 17, 0, 1047, 268, 1, 0, 0, 0, 1048, 1049, 3, 245, 117, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1051, 6, 129, 22, 0, 1051, 270, 1, 0, 0, 0, 1052, 1053, 3, 207, 98, 0, 1053, 1054, 1, 0, 0, 0, 1054, 1055, 6, 130, 18, 0, 1055, 272, 1, 0, 0, 0, 1056, 1057, 3, 167, 78, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059, 6, 131, 16, 0, 1059, 274, 1, 0, 0, 0, 1060, 1061, 3, 49, 19, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1063, 6, 132, 8, 0, 1063, 276, 1, 0, 0, 0, 1064, 1065, 3, 51, 20, 0, 1065, 1066, 1, 0, 0, 0, 1066, 1067, 6, 133, 8, 0, 1067, 278, 1, 0, 0, 0, 1068, 1069, 3, 53, 21, 0, 1069, 1070, 1, 0, 0, 0, 1070, 1071, 6, 134, 8, 0, 1071, 280, 1, 0, 0, 0, 1072, 1073, 3, 65, 27, 0, 1073, 1074, 1, 0, 0, 0, 1074, 1075, 6, 135, 11, 0, 1075, 1076, 6, 135, 12, 0, 1076, 282, 1, 0, 0, 0, 1077, 1078, 3, 105, 47, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 136, 17, 0, 1080, 284, 1, 0, 0, 0, 1081, 1082, 3, 167, 78, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 137, 16, 0, 1084, 286, 1, 0, 0, 0, 1085, 1086, 3, 165, 77, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 138, 23, 0, 1088, 
288, 1, 0, 0, 0, 1089, 1090, 3, 49, 19, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 139, 8, 0, 1092, 290, 1, 0, 0, 0, 1093, 1094, 3, 51, 20, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 140, 8, 0, 1096, 292, 1, 0, 0, 0, 1097, 1098, 3, 53, 21, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 141, 8, 0, 1100, 294, 1, 0, 0, 0, 1101, 1102, 3, 65, 27, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 142, 11, 0, 1104, 1105, 6, 142, 12, 0, 1105, 296, 1, 0, 0, 0, 1106, 1107, 5, 105, 0, 0, 1107, 1108, 5, 110, 0, 0, 1108, 1109, 5, 102, 0, 0, 1109, 1110, 5, 111, 0, 0, 1110, 298, 1, 0, 0, 0, 1111, 1112, 5, 102, 0, 0, 1112, 1113, 5, 117, 0, 0, 1113, 1114, 5, 110, 0, 0, 1114, 1115, 5, 99, 0, 0, 1115, 1116, 5, 116, 0, 0, 1116, 1117, 5, 105, 0, 0, 1117, 1118, 5, 111, 0, 0, 1118, 1119, 5, 110, 0, 0, 1119, 1120, 5, 115, 0, 0, 1120, 300, 1, 0, 0, 0, 1121, 1122, 3, 49, 19, 0, 1122, 1123, 1, 0, 0, 0, 1123, 1124, 6, 145, 8, 0, 1124, 302, 1, 0, 0, 0, 1125, 1126, 3, 51, 20, 0, 1126, 1127, 1, 0, 0, 0, 1127, 1128, 6, 146, 8, 0, 1128, 304, 1, 0, 0, 0, 1129, 1130, 3, 53, 21, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1132, 6, 147, 8, 0, 1132, 306, 1, 0, 0, 0, 1133, 1134, 3, 163, 76, 0, 1134, 1135, 1, 0, 0, 0, 1135, 1136, 6, 148, 13, 0, 1136, 1137, 6, 148, 12, 0, 1137, 308, 1, 0, 0, 0, 1138, 1139, 5, 58, 0, 0, 1139, 310, 1, 0, 0, 0, 1140, 1146, 3, 77, 33, 0, 1141, 1146, 3, 67, 28, 0, 1142, 1146, 3, 105, 47, 0, 1143, 1146, 3, 69, 29, 0, 1144, 1146, 3, 83, 36, 0, 1145, 1140, 1, 0, 0, 0, 1145, 1141, 1, 0, 0, 0, 1145, 1142, 1, 0, 0, 0, 1145, 1143, 1, 0, 0, 0, 1145, 1144, 1, 0, 0, 0, 1146, 1147, 1, 0, 0, 0, 1147, 1145, 1, 0, 0, 0, 1147, 1148, 1, 0, 0, 0, 1148, 312, 1, 0, 0, 0, 1149, 1150, 3, 49, 19, 0, 1150, 1151, 1, 0, 0, 0, 1151, 1152, 6, 151, 8, 0, 1152, 314, 1, 0, 0, 0, 1153, 1154, 3, 51, 20, 0, 1154, 1155, 1, 0, 0, 0, 1155, 1156, 6, 152, 8, 0, 1156, 316, 1, 0, 0, 0, 1157, 1158, 3, 53, 21, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1160, 6, 153, 8, 0, 1160, 318, 1, 0, 0, 0, 54, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 475, 485, 489, 492, 501, 503, 514, 555, 560, 569, 576, 581, 583, 594, 602, 605, 607, 612, 617, 623, 630, 635, 641, 644, 652, 656, 786, 791, 796, 798, 804, 853, 858, 893, 897, 902, 907, 912, 914, 1002, 1007, 1145, 1147, 24, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 64, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 65, 0, 7, 34, 0, 7, 33, 0, 7, 67, 0, 7, 36, 0, 7, 76, 0, 5, 10, 0, 5, 7, 0, 7, 86, 0, 7, 85, 0, 7, 66, 0]
\ No newline at end of file
+[4, 0, 104, 1149, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72,
2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 4, 17, 462, 8, 17, 11, 17, 12, 17, 463, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 5, 18, 472, 8, 18, 10, 18, 12, 18, 475, 9, 18, 1, 18, 3, 18, 478, 8, 18, 1, 18, 3, 18, 481, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 490, 8, 19, 10, 19, 12, 19, 493, 9, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 4, 20, 501, 8, 20, 11, 20, 12, 20, 502, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 3, 31, 544, 8, 31, 1, 31, 4, 31, 547, 8, 31, 11, 31, 12, 31, 548, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 3, 34, 558, 8, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 565, 8, 36, 1, 37, 1, 37, 1, 37, 5, 37, 570, 8, 37, 10, 37, 12, 37, 573, 9, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 5, 37, 581, 8, 37, 10, 37, 12, 37, 584, 9, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 3, 37, 591, 8, 37, 1, 37, 3, 37, 594, 8, 37, 3, 37, 596, 8, 37, 1, 38, 4, 38, 599, 8, 38, 11, 38, 12, 38, 600, 1, 39, 4, 39, 604, 8, 39, 11, 39, 12, 39, 605, 1, 39, 1, 39, 5, 39, 610, 8, 39, 10, 39, 12, 39, 613, 9, 39, 1, 39, 1, 39, 4, 39, 617, 8, 39, 11, 39, 12, 39, 618, 1, 39, 4, 39, 622, 8, 39, 11, 39, 12, 39, 623, 1, 39, 1, 39, 5, 39, 628, 8, 39, 10, 39, 12, 39, 631, 9, 39, 3, 39, 633, 
8, 39, 1, 39, 1, 39, 1, 39, 1, 39, 4, 39, 639, 8, 39, 11, 39, 12, 39, 640, 1, 39, 1, 39, 3, 39, 645, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 5, 76, 773, 8, 76, 10, 76, 12, 76, 776, 9, 76, 1, 76, 1, 76, 3, 76, 780, 8, 76, 1, 76, 4, 76, 783, 8, 76, 11, 76, 12, 76, 784, 3, 76, 787, 8, 76, 1, 77, 1, 77, 4, 77, 791, 8, 77, 11, 77, 12, 77, 792, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 3, 87, 842, 8, 87, 1, 88, 4, 88, 845, 8, 88, 11, 88, 12, 88, 846, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 3, 96, 882, 8, 96, 1, 97, 1, 97, 3, 97, 886, 8, 97, 1, 97, 5, 97, 889, 8, 97, 10, 97, 12, 97, 892, 9, 97, 1, 97, 1, 97, 3, 97, 896, 8, 97, 1, 97, 4, 97, 899, 8, 97, 11, 97, 12, 97, 900, 3, 97, 903, 8, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 118, 1, 118, 3, 118, 991, 8, 118, 1, 118, 5, 118, 994, 8, 118, 10, 118, 12, 118, 997, 9, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 
142, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 4, 149, 1134, 8, 149, 11, 149, 12, 149, 1135, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 2, 491, 582, 0, 153, 11, 1, 13, 2, 15, 3, 17, 4, 19, 5, 21, 6, 23, 7, 25, 8, 27, 9, 29, 10, 31, 11, 33, 12, 35, 13, 37, 14, 39, 15, 41, 16, 43, 17, 45, 18, 47, 19, 49, 20, 51, 21, 53, 0, 55, 0, 57, 22, 59, 23, 61, 24, 63, 25, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 0, 81, 0, 83, 0, 85, 26, 87, 27, 89, 28, 91, 29, 93, 30, 95, 31, 97, 32, 99, 33, 101, 34, 103, 35, 105, 36, 107, 37, 109, 38, 111, 39, 113, 40, 115, 41, 117, 42, 119, 43, 121, 44, 123, 45, 125, 46, 127, 47, 129, 48, 131, 49, 133, 50, 135, 51, 137, 52, 139, 53, 141, 54, 143, 55, 145, 56, 147, 57, 149, 58, 151, 59, 153, 60, 155, 61, 157, 62, 159, 63, 161, 64, 163, 65, 165, 66, 167, 67, 169, 68, 171, 69, 173, 0, 175, 0, 177, 0, 179, 0, 181, 0, 183, 70, 185, 0, 187, 71, 189, 0, 191, 72, 193, 73, 195, 74, 197, 0, 199, 0, 201, 0, 203, 0, 205, 75, 207, 0, 209, 0, 211, 76, 213, 77, 215, 78, 217, 0, 219, 0, 221, 0, 223, 0, 225, 79, 227, 0, 229, 0, 231, 80, 233, 81, 235, 82, 237, 0, 239, 0, 241, 83, 243, 84, 245, 0, 247, 85, 249, 0, 251, 0, 253, 86, 255, 87, 257, 88, 259, 0, 261, 0, 263, 0, 265, 0, 267, 0, 269, 0, 271, 0, 273, 89, 275, 90, 277, 91, 279, 0, 281, 0, 283, 0, 285, 0, 287, 92, 289, 93, 291, 94, 293, 0, 295, 95, 297, 96, 299, 97, 301, 98, 303, 99, 305, 0, 307, 100, 309, 101, 311, 102, 313, 103, 315, 104, 11, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1176, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 1, 53, 1, 0, 0, 0, 1, 55, 1, 0, 0, 0, 1, 57, 1, 0, 0, 0, 1, 59, 1, 0, 0, 0, 1, 61, 1, 0, 0, 0, 2, 63, 1, 0, 0, 0, 2, 85, 1, 0, 0, 0, 2, 87, 1, 0, 0, 0, 2, 89, 1, 0, 0, 0, 2, 91, 1, 0, 0, 0, 2, 93, 1, 0, 0, 0, 2, 95, 1, 0, 0, 0, 2, 97, 1, 0, 0, 0, 2, 99, 1, 0, 0, 0, 2, 101, 1, 0, 0, 0, 2, 103, 1, 0, 0, 0, 2, 105, 1, 0, 0, 0, 2, 107, 1, 0, 0, 0, 2, 109, 1, 0, 0, 0, 2, 111, 1, 0, 0, 0, 2, 113, 1, 0, 0, 0, 2, 115, 1, 0, 0, 0, 2, 117, 1, 0, 0, 0, 2, 119, 1, 0, 0, 0, 2, 121, 1, 0, 0, 0, 2, 123, 1, 0, 0, 0, 2, 125, 1, 0, 0, 0, 2, 127, 1, 0, 0, 0, 2, 129, 1, 0, 0, 0, 2, 131, 1, 0, 0, 0, 2, 133, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 2, 137, 1, 0, 0, 0, 2, 139, 1, 0, 0, 0, 2, 141, 1, 0, 0, 0, 2, 143, 1, 0, 0, 0, 2, 145, 1, 0, 0, 0, 2, 147, 1, 0, 0, 0, 2, 149, 1, 0, 0, 0, 2, 151, 1, 0, 0, 0, 2, 153, 1, 0, 0, 0, 2, 155, 1, 0, 0, 0, 2, 157, 1, 0, 0, 0, 2, 159, 1, 0, 
0, 0, 2, 161, 1, 0, 0, 0, 2, 163, 1, 0, 0, 0, 2, 165, 1, 0, 0, 0, 2, 167, 1, 0, 0, 0, 2, 169, 1, 0, 0, 0, 2, 171, 1, 0, 0, 0, 3, 173, 1, 0, 0, 0, 3, 175, 1, 0, 0, 0, 3, 177, 1, 0, 0, 0, 3, 179, 1, 0, 0, 0, 3, 181, 1, 0, 0, 0, 3, 183, 1, 0, 0, 0, 3, 187, 1, 0, 0, 0, 3, 189, 1, 0, 0, 0, 3, 191, 1, 0, 0, 0, 3, 193, 1, 0, 0, 0, 3, 195, 1, 0, 0, 0, 4, 197, 1, 0, 0, 0, 4, 199, 1, 0, 0, 0, 4, 201, 1, 0, 0, 0, 4, 205, 1, 0, 0, 0, 4, 207, 1, 0, 0, 0, 4, 209, 1, 0, 0, 0, 4, 211, 1, 0, 0, 0, 4, 213, 1, 0, 0, 0, 4, 215, 1, 0, 0, 0, 5, 217, 1, 0, 0, 0, 5, 219, 1, 0, 0, 0, 5, 221, 1, 0, 0, 0, 5, 223, 1, 0, 0, 0, 5, 225, 1, 0, 0, 0, 5, 227, 1, 0, 0, 0, 5, 229, 1, 0, 0, 0, 5, 231, 1, 0, 0, 0, 5, 233, 1, 0, 0, 0, 5, 235, 1, 0, 0, 0, 6, 237, 1, 0, 0, 0, 6, 239, 1, 0, 0, 0, 6, 241, 1, 0, 0, 0, 6, 243, 1, 0, 0, 0, 6, 247, 1, 0, 0, 0, 6, 249, 1, 0, 0, 0, 6, 251, 1, 0, 0, 0, 6, 253, 1, 0, 0, 0, 6, 255, 1, 0, 0, 0, 6, 257, 1, 0, 0, 0, 7, 259, 1, 0, 0, 0, 7, 261, 1, 0, 0, 0, 7, 263, 1, 0, 0, 0, 7, 265, 1, 0, 0, 0, 7, 267, 1, 0, 0, 0, 7, 269, 1, 0, 0, 0, 7, 271, 1, 0, 0, 0, 7, 273, 1, 0, 0, 0, 7, 275, 1, 0, 0, 0, 7, 277, 1, 0, 0, 0, 8, 279, 1, 0, 0, 0, 8, 281, 1, 0, 0, 0, 8, 283, 1, 0, 0, 0, 8, 285, 1, 0, 0, 0, 8, 287, 1, 0, 0, 0, 8, 289, 1, 0, 0, 0, 8, 291, 1, 0, 0, 0, 9, 293, 1, 0, 0, 0, 9, 295, 1, 0, 0, 0, 9, 297, 1, 0, 0, 0, 9, 299, 1, 0, 0, 0, 9, 301, 1, 0, 0, 0, 9, 303, 1, 0, 0, 0, 10, 305, 1, 0, 0, 0, 10, 307, 1, 0, 0, 0, 10, 309, 1, 0, 0, 0, 10, 311, 1, 0, 0, 0, 10, 313, 1, 0, 0, 0, 10, 315, 1, 0, 0, 0, 11, 317, 1, 0, 0, 0, 13, 327, 1, 0, 0, 0, 15, 334, 1, 0, 0, 0, 17, 343, 1, 0, 0, 0, 19, 350, 1, 0, 0, 0, 21, 360, 1, 0, 0, 0, 23, 367, 1, 0, 0, 0, 25, 374, 1, 0, 0, 0, 27, 388, 1, 0, 0, 0, 29, 395, 1, 0, 0, 0, 31, 403, 1, 0, 0, 0, 33, 415, 1, 0, 0, 0, 35, 424, 1, 0, 0, 0, 37, 430, 1, 0, 0, 0, 39, 437, 1, 0, 0, 0, 41, 444, 1, 0, 0, 0, 43, 452, 1, 0, 0, 0, 45, 461, 1, 0, 0, 0, 47, 467, 1, 0, 0, 0, 49, 484, 1, 0, 0, 0, 51, 500, 1, 0, 0, 0, 53, 506, 1, 0, 0, 0, 55, 511, 1, 0, 0, 0, 57, 516, 1, 0, 0, 0, 59, 520, 1, 0, 0, 0, 61, 524, 1, 0, 0, 0, 63, 528, 1, 0, 0, 0, 65, 532, 1, 0, 0, 0, 67, 534, 1, 0, 0, 0, 69, 536, 1, 0, 0, 0, 71, 539, 1, 0, 0, 0, 73, 541, 1, 0, 0, 0, 75, 550, 1, 0, 0, 0, 77, 552, 1, 0, 0, 0, 79, 557, 1, 0, 0, 0, 81, 559, 1, 0, 0, 0, 83, 564, 1, 0, 0, 0, 85, 595, 1, 0, 0, 0, 87, 598, 1, 0, 0, 0, 89, 644, 1, 0, 0, 0, 91, 646, 1, 0, 0, 0, 93, 649, 1, 0, 0, 0, 95, 653, 1, 0, 0, 0, 97, 657, 1, 0, 0, 0, 99, 659, 1, 0, 0, 0, 101, 661, 1, 0, 0, 0, 103, 666, 1, 0, 0, 0, 105, 668, 1, 0, 0, 0, 107, 674, 1, 0, 0, 0, 109, 680, 1, 0, 0, 0, 111, 685, 1, 0, 0, 0, 113, 687, 1, 0, 0, 0, 115, 690, 1, 0, 0, 0, 117, 693, 1, 0, 0, 0, 119, 698, 1, 0, 0, 0, 121, 702, 1, 0, 0, 0, 123, 707, 1, 0, 0, 0, 125, 713, 1, 0, 0, 0, 127, 716, 1, 0, 0, 0, 129, 718, 1, 0, 0, 0, 131, 724, 1, 0, 0, 0, 133, 726, 1, 0, 0, 0, 135, 731, 1, 0, 0, 0, 137, 734, 1, 0, 0, 0, 139, 737, 1, 0, 0, 0, 141, 740, 1, 0, 0, 0, 143, 742, 1, 0, 0, 0, 145, 745, 1, 0, 0, 0, 147, 747, 1, 0, 0, 0, 149, 750, 1, 0, 0, 0, 151, 752, 1, 0, 0, 0, 153, 754, 1, 0, 0, 0, 155, 756, 1, 0, 0, 0, 157, 758, 1, 0, 0, 0, 159, 760, 1, 0, 0, 0, 161, 765, 1, 0, 0, 0, 163, 786, 1, 0, 0, 0, 165, 788, 1, 0, 0, 0, 167, 796, 1, 0, 0, 0, 169, 800, 1, 0, 0, 0, 171, 804, 1, 0, 0, 0, 173, 808, 1, 0, 0, 0, 175, 813, 1, 0, 0, 0, 177, 817, 1, 0, 0, 0, 179, 821, 1, 0, 0, 0, 181, 825, 1, 0, 0, 0, 183, 829, 1, 0, 0, 0, 185, 841, 1, 0, 0, 0, 187, 844, 1, 0, 0, 0, 189, 848, 1, 0, 0, 0, 191, 852, 1, 0, 0, 0, 193, 856, 1, 0, 0, 0, 195, 860, 1, 0, 0, 0, 197, 864, 1, 0, 0, 0, 199, 869, 1, 0, 0, 
0, 201, 873, 1, 0, 0, 0, 203, 881, 1, 0, 0, 0, 205, 902, 1, 0, 0, 0, 207, 904, 1, 0, 0, 0, 209, 908, 1, 0, 0, 0, 211, 912, 1, 0, 0, 0, 213, 916, 1, 0, 0, 0, 215, 920, 1, 0, 0, 0, 217, 924, 1, 0, 0, 0, 219, 929, 1, 0, 0, 0, 221, 933, 1, 0, 0, 0, 223, 937, 1, 0, 0, 0, 225, 941, 1, 0, 0, 0, 227, 944, 1, 0, 0, 0, 229, 948, 1, 0, 0, 0, 231, 952, 1, 0, 0, 0, 233, 956, 1, 0, 0, 0, 235, 960, 1, 0, 0, 0, 237, 964, 1, 0, 0, 0, 239, 969, 1, 0, 0, 0, 241, 974, 1, 0, 0, 0, 243, 979, 1, 0, 0, 0, 245, 986, 1, 0, 0, 0, 247, 990, 1, 0, 0, 0, 249, 998, 1, 0, 0, 0, 251, 1002, 1, 0, 0, 0, 253, 1006, 1, 0, 0, 0, 255, 1010, 1, 0, 0, 0, 257, 1014, 1, 0, 0, 0, 259, 1018, 1, 0, 0, 0, 261, 1024, 1, 0, 0, 0, 263, 1028, 1, 0, 0, 0, 265, 1032, 1, 0, 0, 0, 267, 1036, 1, 0, 0, 0, 269, 1040, 1, 0, 0, 0, 271, 1044, 1, 0, 0, 0, 273, 1048, 1, 0, 0, 0, 275, 1052, 1, 0, 0, 0, 277, 1056, 1, 0, 0, 0, 279, 1060, 1, 0, 0, 0, 281, 1065, 1, 0, 0, 0, 283, 1069, 1, 0, 0, 0, 285, 1073, 1, 0, 0, 0, 287, 1077, 1, 0, 0, 0, 289, 1081, 1, 0, 0, 0, 291, 1085, 1, 0, 0, 0, 293, 1089, 1, 0, 0, 0, 295, 1094, 1, 0, 0, 0, 297, 1099, 1, 0, 0, 0, 299, 1109, 1, 0, 0, 0, 301, 1113, 1, 0, 0, 0, 303, 1117, 1, 0, 0, 0, 305, 1121, 1, 0, 0, 0, 307, 1126, 1, 0, 0, 0, 309, 1133, 1, 0, 0, 0, 311, 1137, 1, 0, 0, 0, 313, 1141, 1, 0, 0, 0, 315, 1145, 1, 0, 0, 0, 317, 318, 5, 100, 0, 0, 318, 319, 5, 105, 0, 0, 319, 320, 5, 115, 0, 0, 320, 321, 5, 115, 0, 0, 321, 322, 5, 101, 0, 0, 322, 323, 5, 99, 0, 0, 323, 324, 5, 116, 0, 0, 324, 325, 1, 0, 0, 0, 325, 326, 6, 0, 0, 0, 326, 12, 1, 0, 0, 0, 327, 328, 5, 100, 0, 0, 328, 329, 5, 114, 0, 0, 329, 330, 5, 111, 0, 0, 330, 331, 5, 112, 0, 0, 331, 332, 1, 0, 0, 0, 332, 333, 6, 1, 1, 0, 333, 14, 1, 0, 0, 0, 334, 335, 5, 101, 0, 0, 335, 336, 5, 110, 0, 0, 336, 337, 5, 114, 0, 0, 337, 338, 5, 105, 0, 0, 338, 339, 5, 99, 0, 0, 339, 340, 5, 104, 0, 0, 340, 341, 1, 0, 0, 0, 341, 342, 6, 2, 2, 0, 342, 16, 1, 0, 0, 0, 343, 344, 5, 101, 0, 0, 344, 345, 5, 118, 0, 0, 345, 346, 5, 97, 0, 0, 346, 347, 5, 108, 0, 0, 347, 348, 1, 0, 0, 0, 348, 349, 6, 3, 0, 0, 349, 18, 1, 0, 0, 0, 350, 351, 5, 101, 0, 0, 351, 352, 5, 120, 0, 0, 352, 353, 5, 112, 0, 0, 353, 354, 5, 108, 0, 0, 354, 355, 5, 97, 0, 0, 355, 356, 5, 105, 0, 0, 356, 357, 5, 110, 0, 0, 357, 358, 1, 0, 0, 0, 358, 359, 6, 4, 3, 0, 359, 20, 1, 0, 0, 0, 360, 361, 5, 102, 0, 0, 361, 362, 5, 114, 0, 0, 362, 363, 5, 111, 0, 0, 363, 364, 5, 109, 0, 0, 364, 365, 1, 0, 0, 0, 365, 366, 6, 5, 4, 0, 366, 22, 1, 0, 0, 0, 367, 368, 5, 103, 0, 0, 368, 369, 5, 114, 0, 0, 369, 370, 5, 111, 0, 0, 370, 371, 5, 107, 0, 0, 371, 372, 1, 0, 0, 0, 372, 373, 6, 6, 0, 0, 373, 24, 1, 0, 0, 0, 374, 375, 5, 105, 0, 0, 375, 376, 5, 110, 0, 0, 376, 377, 5, 108, 0, 0, 377, 378, 5, 105, 0, 0, 378, 379, 5, 110, 0, 0, 379, 380, 5, 101, 0, 0, 380, 381, 5, 115, 0, 0, 381, 382, 5, 116, 0, 0, 382, 383, 5, 97, 0, 0, 383, 384, 5, 116, 0, 0, 384, 385, 5, 115, 0, 0, 385, 386, 1, 0, 0, 0, 386, 387, 6, 7, 0, 0, 387, 26, 1, 0, 0, 0, 388, 389, 5, 107, 0, 0, 389, 390, 5, 101, 0, 0, 390, 391, 5, 101, 0, 0, 391, 392, 5, 112, 0, 0, 392, 393, 1, 0, 0, 0, 393, 394, 6, 8, 1, 0, 394, 28, 1, 0, 0, 0, 395, 396, 5, 108, 0, 0, 396, 397, 5, 105, 0, 0, 397, 398, 5, 109, 0, 0, 398, 399, 5, 105, 0, 0, 399, 400, 5, 116, 0, 0, 400, 401, 1, 0, 0, 0, 401, 402, 6, 9, 0, 0, 402, 30, 1, 0, 0, 0, 403, 404, 5, 109, 0, 0, 404, 405, 5, 118, 0, 0, 405, 406, 5, 95, 0, 0, 406, 407, 5, 101, 0, 0, 407, 408, 5, 120, 0, 0, 408, 409, 5, 112, 0, 0, 409, 410, 5, 97, 0, 0, 410, 411, 5, 110, 0, 0, 411, 412, 5, 100, 0, 0, 412, 413, 1, 0, 0, 0, 413, 414, 6, 10, 
5, 0, 414, 32, 1, 0, 0, 0, 415, 416, 5, 114, 0, 0, 416, 417, 5, 101, 0, 0, 417, 418, 5, 110, 0, 0, 418, 419, 5, 97, 0, 0, 419, 420, 5, 109, 0, 0, 420, 421, 5, 101, 0, 0, 421, 422, 1, 0, 0, 0, 422, 423, 6, 11, 6, 0, 423, 34, 1, 0, 0, 0, 424, 425, 5, 114, 0, 0, 425, 426, 5, 111, 0, 0, 426, 427, 5, 119, 0, 0, 427, 428, 1, 0, 0, 0, 428, 429, 6, 12, 0, 0, 429, 36, 1, 0, 0, 0, 430, 431, 5, 115, 0, 0, 431, 432, 5, 104, 0, 0, 432, 433, 5, 111, 0, 0, 433, 434, 5, 119, 0, 0, 434, 435, 1, 0, 0, 0, 435, 436, 6, 13, 7, 0, 436, 38, 1, 0, 0, 0, 437, 438, 5, 115, 0, 0, 438, 439, 5, 111, 0, 0, 439, 440, 5, 114, 0, 0, 440, 441, 5, 116, 0, 0, 441, 442, 1, 0, 0, 0, 442, 443, 6, 14, 0, 0, 443, 40, 1, 0, 0, 0, 444, 445, 5, 115, 0, 0, 445, 446, 5, 116, 0, 0, 446, 447, 5, 97, 0, 0, 447, 448, 5, 116, 0, 0, 448, 449, 5, 115, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 15, 0, 0, 451, 42, 1, 0, 0, 0, 452, 453, 5, 119, 0, 0, 453, 454, 5, 104, 0, 0, 454, 455, 5, 101, 0, 0, 455, 456, 5, 114, 0, 0, 456, 457, 5, 101, 0, 0, 457, 458, 1, 0, 0, 0, 458, 459, 6, 16, 0, 0, 459, 44, 1, 0, 0, 0, 460, 462, 8, 0, 0, 0, 461, 460, 1, 0, 0, 0, 462, 463, 1, 0, 0, 0, 463, 461, 1, 0, 0, 0, 463, 464, 1, 0, 0, 0, 464, 465, 1, 0, 0, 0, 465, 466, 6, 17, 0, 0, 466, 46, 1, 0, 0, 0, 467, 468, 5, 47, 0, 0, 468, 469, 5, 47, 0, 0, 469, 473, 1, 0, 0, 0, 470, 472, 8, 1, 0, 0, 471, 470, 1, 0, 0, 0, 472, 475, 1, 0, 0, 0, 473, 471, 1, 0, 0, 0, 473, 474, 1, 0, 0, 0, 474, 477, 1, 0, 0, 0, 475, 473, 1, 0, 0, 0, 476, 478, 5, 13, 0, 0, 477, 476, 1, 0, 0, 0, 477, 478, 1, 0, 0, 0, 478, 480, 1, 0, 0, 0, 479, 481, 5, 10, 0, 0, 480, 479, 1, 0, 0, 0, 480, 481, 1, 0, 0, 0, 481, 482, 1, 0, 0, 0, 482, 483, 6, 18, 8, 0, 483, 48, 1, 0, 0, 0, 484, 485, 5, 47, 0, 0, 485, 486, 5, 42, 0, 0, 486, 491, 1, 0, 0, 0, 487, 490, 3, 49, 19, 0, 488, 490, 9, 0, 0, 0, 489, 487, 1, 0, 0, 0, 489, 488, 1, 0, 0, 0, 490, 493, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 491, 489, 1, 0, 0, 0, 492, 494, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 494, 495, 5, 42, 0, 0, 495, 496, 5, 47, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 19, 8, 0, 498, 50, 1, 0, 0, 0, 499, 501, 7, 2, 0, 0, 500, 499, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 500, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 20, 8, 0, 505, 52, 1, 0, 0, 0, 506, 507, 3, 159, 74, 0, 507, 508, 1, 0, 0, 0, 508, 509, 6, 21, 9, 0, 509, 510, 6, 21, 10, 0, 510, 54, 1, 0, 0, 0, 511, 512, 3, 63, 26, 0, 512, 513, 1, 0, 0, 0, 513, 514, 6, 22, 11, 0, 514, 515, 6, 22, 12, 0, 515, 56, 1, 0, 0, 0, 516, 517, 3, 51, 20, 0, 517, 518, 1, 0, 0, 0, 518, 519, 6, 23, 8, 0, 519, 58, 1, 0, 0, 0, 520, 521, 3, 47, 18, 0, 521, 522, 1, 0, 0, 0, 522, 523, 6, 24, 8, 0, 523, 60, 1, 0, 0, 0, 524, 525, 3, 49, 19, 0, 525, 526, 1, 0, 0, 0, 526, 527, 6, 25, 8, 0, 527, 62, 1, 0, 0, 0, 528, 529, 5, 124, 0, 0, 529, 530, 1, 0, 0, 0, 530, 531, 6, 26, 12, 0, 531, 64, 1, 0, 0, 0, 532, 533, 7, 3, 0, 0, 533, 66, 1, 0, 0, 0, 534, 535, 7, 4, 0, 0, 535, 68, 1, 0, 0, 0, 536, 537, 5, 92, 0, 0, 537, 538, 7, 5, 0, 0, 538, 70, 1, 0, 0, 0, 539, 540, 8, 6, 0, 0, 540, 72, 1, 0, 0, 0, 541, 543, 7, 7, 0, 0, 542, 544, 7, 8, 0, 0, 543, 542, 1, 0, 0, 0, 543, 544, 1, 0, 0, 0, 544, 546, 1, 0, 0, 0, 545, 547, 3, 65, 27, 0, 546, 545, 1, 0, 0, 0, 547, 548, 1, 0, 0, 0, 548, 546, 1, 0, 0, 0, 548, 549, 1, 0, 0, 0, 549, 74, 1, 0, 0, 0, 550, 551, 5, 64, 0, 0, 551, 76, 1, 0, 0, 0, 552, 553, 5, 96, 0, 0, 553, 78, 1, 0, 0, 0, 554, 558, 8, 9, 0, 0, 555, 556, 5, 96, 0, 0, 556, 558, 5, 96, 0, 0, 557, 554, 1, 0, 0, 0, 557, 555, 1, 0, 0, 0, 558, 80, 1, 0, 0, 0, 559, 560, 5, 95, 0, 0, 560, 82, 1, 0, 0, 0, 561, 565, 3, 
67, 28, 0, 562, 565, 3, 65, 27, 0, 563, 565, 3, 81, 35, 0, 564, 561, 1, 0, 0, 0, 564, 562, 1, 0, 0, 0, 564, 563, 1, 0, 0, 0, 565, 84, 1, 0, 0, 0, 566, 571, 5, 34, 0, 0, 567, 570, 3, 69, 29, 0, 568, 570, 3, 71, 30, 0, 569, 567, 1, 0, 0, 0, 569, 568, 1, 0, 0, 0, 570, 573, 1, 0, 0, 0, 571, 569, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 572, 574, 1, 0, 0, 0, 573, 571, 1, 0, 0, 0, 574, 596, 5, 34, 0, 0, 575, 576, 5, 34, 0, 0, 576, 577, 5, 34, 0, 0, 577, 578, 5, 34, 0, 0, 578, 582, 1, 0, 0, 0, 579, 581, 8, 1, 0, 0, 580, 579, 1, 0, 0, 0, 581, 584, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 582, 580, 1, 0, 0, 0, 583, 585, 1, 0, 0, 0, 584, 582, 1, 0, 0, 0, 585, 586, 5, 34, 0, 0, 586, 587, 5, 34, 0, 0, 587, 588, 5, 34, 0, 0, 588, 590, 1, 0, 0, 0, 589, 591, 5, 34, 0, 0, 590, 589, 1, 0, 0, 0, 590, 591, 1, 0, 0, 0, 591, 593, 1, 0, 0, 0, 592, 594, 5, 34, 0, 0, 593, 592, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 596, 1, 0, 0, 0, 595, 566, 1, 0, 0, 0, 595, 575, 1, 0, 0, 0, 596, 86, 1, 0, 0, 0, 597, 599, 3, 65, 27, 0, 598, 597, 1, 0, 0, 0, 599, 600, 1, 0, 0, 0, 600, 598, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, 88, 1, 0, 0, 0, 602, 604, 3, 65, 27, 0, 603, 602, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 603, 1, 0, 0, 0, 605, 606, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 611, 3, 103, 46, 0, 608, 610, 3, 65, 27, 0, 609, 608, 1, 0, 0, 0, 610, 613, 1, 0, 0, 0, 611, 609, 1, 0, 0, 0, 611, 612, 1, 0, 0, 0, 612, 645, 1, 0, 0, 0, 613, 611, 1, 0, 0, 0, 614, 616, 3, 103, 46, 0, 615, 617, 3, 65, 27, 0, 616, 615, 1, 0, 0, 0, 617, 618, 1, 0, 0, 0, 618, 616, 1, 0, 0, 0, 618, 619, 1, 0, 0, 0, 619, 645, 1, 0, 0, 0, 620, 622, 3, 65, 27, 0, 621, 620, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 621, 1, 0, 0, 0, 623, 624, 1, 0, 0, 0, 624, 632, 1, 0, 0, 0, 625, 629, 3, 103, 46, 0, 626, 628, 3, 65, 27, 0, 627, 626, 1, 0, 0, 0, 628, 631, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 633, 1, 0, 0, 0, 631, 629, 1, 0, 0, 0, 632, 625, 1, 0, 0, 0, 632, 633, 1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 635, 3, 73, 31, 0, 635, 645, 1, 0, 0, 0, 636, 638, 3, 103, 46, 0, 637, 639, 3, 65, 27, 0, 638, 637, 1, 0, 0, 0, 639, 640, 1, 0, 0, 0, 640, 638, 1, 0, 0, 0, 640, 641, 1, 0, 0, 0, 641, 642, 1, 0, 0, 0, 642, 643, 3, 73, 31, 0, 643, 645, 1, 0, 0, 0, 644, 603, 1, 0, 0, 0, 644, 614, 1, 0, 0, 0, 644, 621, 1, 0, 0, 0, 644, 636, 1, 0, 0, 0, 645, 90, 1, 0, 0, 0, 646, 647, 5, 98, 0, 0, 647, 648, 5, 121, 0, 0, 648, 92, 1, 0, 0, 0, 649, 650, 5, 97, 0, 0, 650, 651, 5, 110, 0, 0, 651, 652, 5, 100, 0, 0, 652, 94, 1, 0, 0, 0, 653, 654, 5, 97, 0, 0, 654, 655, 5, 115, 0, 0, 655, 656, 5, 99, 0, 0, 656, 96, 1, 0, 0, 0, 657, 658, 5, 61, 0, 0, 658, 98, 1, 0, 0, 0, 659, 660, 5, 44, 0, 0, 660, 100, 1, 0, 0, 0, 661, 662, 5, 100, 0, 0, 662, 663, 5, 101, 0, 0, 663, 664, 5, 115, 0, 0, 664, 665, 5, 99, 0, 0, 665, 102, 1, 0, 0, 0, 666, 667, 5, 46, 0, 0, 667, 104, 1, 0, 0, 0, 668, 669, 5, 102, 0, 0, 669, 670, 5, 97, 0, 0, 670, 671, 5, 108, 0, 0, 671, 672, 5, 115, 0, 0, 672, 673, 5, 101, 0, 0, 673, 106, 1, 0, 0, 0, 674, 675, 5, 102, 0, 0, 675, 676, 5, 105, 0, 0, 676, 677, 5, 114, 0, 0, 677, 678, 5, 115, 0, 0, 678, 679, 5, 116, 0, 0, 679, 108, 1, 0, 0, 0, 680, 681, 5, 108, 0, 0, 681, 682, 5, 97, 0, 0, 682, 683, 5, 115, 0, 0, 683, 684, 5, 116, 0, 0, 684, 110, 1, 0, 0, 0, 685, 686, 5, 40, 0, 0, 686, 112, 1, 0, 0, 0, 687, 688, 5, 105, 0, 0, 688, 689, 5, 110, 0, 0, 689, 114, 1, 0, 0, 0, 690, 691, 5, 105, 0, 0, 691, 692, 5, 115, 0, 0, 692, 116, 1, 0, 0, 0, 693, 694, 5, 108, 0, 0, 694, 695, 5, 105, 0, 0, 695, 696, 5, 107, 0, 0, 696, 697, 5, 101, 0, 0, 697, 118, 1, 0, 0, 0, 698, 699, 5, 110, 0, 0, 699, 
700, 5, 111, 0, 0, 700, 701, 5, 116, 0, 0, 701, 120, 1, 0, 0, 0, 702, 703, 5, 110, 0, 0, 703, 704, 5, 117, 0, 0, 704, 705, 5, 108, 0, 0, 705, 706, 5, 108, 0, 0, 706, 122, 1, 0, 0, 0, 707, 708, 5, 110, 0, 0, 708, 709, 5, 117, 0, 0, 709, 710, 5, 108, 0, 0, 710, 711, 5, 108, 0, 0, 711, 712, 5, 115, 0, 0, 712, 124, 1, 0, 0, 0, 713, 714, 5, 111, 0, 0, 714, 715, 5, 114, 0, 0, 715, 126, 1, 0, 0, 0, 716, 717, 5, 63, 0, 0, 717, 128, 1, 0, 0, 0, 718, 719, 5, 114, 0, 0, 719, 720, 5, 108, 0, 0, 720, 721, 5, 105, 0, 0, 721, 722, 5, 107, 0, 0, 722, 723, 5, 101, 0, 0, 723, 130, 1, 0, 0, 0, 724, 725, 5, 41, 0, 0, 725, 132, 1, 0, 0, 0, 726, 727, 5, 116, 0, 0, 727, 728, 5, 114, 0, 0, 728, 729, 5, 117, 0, 0, 729, 730, 5, 101, 0, 0, 730, 134, 1, 0, 0, 0, 731, 732, 5, 61, 0, 0, 732, 733, 5, 61, 0, 0, 733, 136, 1, 0, 0, 0, 734, 735, 5, 61, 0, 0, 735, 736, 5, 126, 0, 0, 736, 138, 1, 0, 0, 0, 737, 738, 5, 33, 0, 0, 738, 739, 5, 61, 0, 0, 739, 140, 1, 0, 0, 0, 740, 741, 5, 60, 0, 0, 741, 142, 1, 0, 0, 0, 742, 743, 5, 60, 0, 0, 743, 744, 5, 61, 0, 0, 744, 144, 1, 0, 0, 0, 745, 746, 5, 62, 0, 0, 746, 146, 1, 0, 0, 0, 747, 748, 5, 62, 0, 0, 748, 749, 5, 61, 0, 0, 749, 148, 1, 0, 0, 0, 750, 751, 5, 43, 0, 0, 751, 150, 1, 0, 0, 0, 752, 753, 5, 45, 0, 0, 753, 152, 1, 0, 0, 0, 754, 755, 5, 42, 0, 0, 755, 154, 1, 0, 0, 0, 756, 757, 5, 47, 0, 0, 757, 156, 1, 0, 0, 0, 758, 759, 5, 37, 0, 0, 759, 158, 1, 0, 0, 0, 760, 761, 5, 91, 0, 0, 761, 762, 1, 0, 0, 0, 762, 763, 6, 74, 0, 0, 763, 764, 6, 74, 0, 0, 764, 160, 1, 0, 0, 0, 765, 766, 5, 93, 0, 0, 766, 767, 1, 0, 0, 0, 767, 768, 6, 75, 12, 0, 768, 769, 6, 75, 12, 0, 769, 162, 1, 0, 0, 0, 770, 774, 3, 67, 28, 0, 771, 773, 3, 83, 36, 0, 772, 771, 1, 0, 0, 0, 773, 776, 1, 0, 0, 0, 774, 772, 1, 0, 0, 0, 774, 775, 1, 0, 0, 0, 775, 787, 1, 0, 0, 0, 776, 774, 1, 0, 0, 0, 777, 780, 3, 81, 35, 0, 778, 780, 3, 75, 32, 0, 779, 777, 1, 0, 0, 0, 779, 778, 1, 0, 0, 0, 780, 782, 1, 0, 0, 0, 781, 783, 3, 83, 36, 0, 782, 781, 1, 0, 0, 0, 783, 784, 1, 0, 0, 0, 784, 782, 1, 0, 0, 0, 784, 785, 1, 0, 0, 0, 785, 787, 1, 0, 0, 0, 786, 770, 1, 0, 0, 0, 786, 779, 1, 0, 0, 0, 787, 164, 1, 0, 0, 0, 788, 790, 3, 77, 33, 0, 789, 791, 3, 79, 34, 0, 790, 789, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 790, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 794, 1, 0, 0, 0, 794, 795, 3, 77, 33, 0, 795, 166, 1, 0, 0, 0, 796, 797, 3, 47, 18, 0, 797, 798, 1, 0, 0, 0, 798, 799, 6, 78, 8, 0, 799, 168, 1, 0, 0, 0, 800, 801, 3, 49, 19, 0, 801, 802, 1, 0, 0, 0, 802, 803, 6, 79, 8, 0, 803, 170, 1, 0, 0, 0, 804, 805, 3, 51, 20, 0, 805, 806, 1, 0, 0, 0, 806, 807, 6, 80, 8, 0, 807, 172, 1, 0, 0, 0, 808, 809, 3, 63, 26, 0, 809, 810, 1, 0, 0, 0, 810, 811, 6, 81, 11, 0, 811, 812, 6, 81, 12, 0, 812, 174, 1, 0, 0, 0, 813, 814, 3, 159, 74, 0, 814, 815, 1, 0, 0, 0, 815, 816, 6, 82, 9, 0, 816, 176, 1, 0, 0, 0, 817, 818, 3, 161, 75, 0, 818, 819, 1, 0, 0, 0, 819, 820, 6, 83, 13, 0, 820, 178, 1, 0, 0, 0, 821, 822, 3, 99, 44, 0, 822, 823, 1, 0, 0, 0, 823, 824, 6, 84, 14, 0, 824, 180, 1, 0, 0, 0, 825, 826, 3, 97, 43, 0, 826, 827, 1, 0, 0, 0, 827, 828, 6, 85, 15, 0, 828, 182, 1, 0, 0, 0, 829, 830, 5, 109, 0, 0, 830, 831, 5, 101, 0, 0, 831, 832, 5, 116, 0, 0, 832, 833, 5, 97, 0, 0, 833, 834, 5, 100, 0, 0, 834, 835, 5, 97, 0, 0, 835, 836, 5, 116, 0, 0, 836, 837, 5, 97, 0, 0, 837, 184, 1, 0, 0, 0, 838, 842, 8, 10, 0, 0, 839, 840, 5, 47, 0, 0, 840, 842, 8, 11, 0, 0, 841, 838, 1, 0, 0, 0, 841, 839, 1, 0, 0, 0, 842, 186, 1, 0, 0, 0, 843, 845, 3, 185, 87, 0, 844, 843, 1, 0, 0, 0, 845, 846, 1, 0, 0, 0, 846, 844, 1, 0, 0, 0, 846, 847, 1, 0, 0, 0, 847, 188, 
1, 0, 0, 0, 848, 849, 3, 165, 77, 0, 849, 850, 1, 0, 0, 0, 850, 851, 6, 89, 16, 0, 851, 190, 1, 0, 0, 0, 852, 853, 3, 47, 18, 0, 853, 854, 1, 0, 0, 0, 854, 855, 6, 90, 8, 0, 855, 192, 1, 0, 0, 0, 856, 857, 3, 49, 19, 0, 857, 858, 1, 0, 0, 0, 858, 859, 6, 91, 8, 0, 859, 194, 1, 0, 0, 0, 860, 861, 3, 51, 20, 0, 861, 862, 1, 0, 0, 0, 862, 863, 6, 92, 8, 0, 863, 196, 1, 0, 0, 0, 864, 865, 3, 63, 26, 0, 865, 866, 1, 0, 0, 0, 866, 867, 6, 93, 11, 0, 867, 868, 6, 93, 12, 0, 868, 198, 1, 0, 0, 0, 869, 870, 3, 103, 46, 0, 870, 871, 1, 0, 0, 0, 871, 872, 6, 94, 17, 0, 872, 200, 1, 0, 0, 0, 873, 874, 3, 99, 44, 0, 874, 875, 1, 0, 0, 0, 875, 876, 6, 95, 14, 0, 876, 202, 1, 0, 0, 0, 877, 882, 3, 67, 28, 0, 878, 882, 3, 65, 27, 0, 879, 882, 3, 81, 35, 0, 880, 882, 3, 153, 71, 0, 881, 877, 1, 0, 0, 0, 881, 878, 1, 0, 0, 0, 881, 879, 1, 0, 0, 0, 881, 880, 1, 0, 0, 0, 882, 204, 1, 0, 0, 0, 883, 886, 3, 67, 28, 0, 884, 886, 3, 153, 71, 0, 885, 883, 1, 0, 0, 0, 885, 884, 1, 0, 0, 0, 886, 890, 1, 0, 0, 0, 887, 889, 3, 203, 96, 0, 888, 887, 1, 0, 0, 0, 889, 892, 1, 0, 0, 0, 890, 888, 1, 0, 0, 0, 890, 891, 1, 0, 0, 0, 891, 903, 1, 0, 0, 0, 892, 890, 1, 0, 0, 0, 893, 896, 3, 81, 35, 0, 894, 896, 3, 75, 32, 0, 895, 893, 1, 0, 0, 0, 895, 894, 1, 0, 0, 0, 896, 898, 1, 0, 0, 0, 897, 899, 3, 203, 96, 0, 898, 897, 1, 0, 0, 0, 899, 900, 1, 0, 0, 0, 900, 898, 1, 0, 0, 0, 900, 901, 1, 0, 0, 0, 901, 903, 1, 0, 0, 0, 902, 885, 1, 0, 0, 0, 902, 895, 1, 0, 0, 0, 903, 206, 1, 0, 0, 0, 904, 905, 3, 205, 97, 0, 905, 906, 1, 0, 0, 0, 906, 907, 6, 98, 18, 0, 907, 208, 1, 0, 0, 0, 908, 909, 3, 165, 77, 0, 909, 910, 1, 0, 0, 0, 910, 911, 6, 99, 16, 0, 911, 210, 1, 0, 0, 0, 912, 913, 3, 47, 18, 0, 913, 914, 1, 0, 0, 0, 914, 915, 6, 100, 8, 0, 915, 212, 1, 0, 0, 0, 916, 917, 3, 49, 19, 0, 917, 918, 1, 0, 0, 0, 918, 919, 6, 101, 8, 0, 919, 214, 1, 0, 0, 0, 920, 921, 3, 51, 20, 0, 921, 922, 1, 0, 0, 0, 922, 923, 6, 102, 8, 0, 923, 216, 1, 0, 0, 0, 924, 925, 3, 63, 26, 0, 925, 926, 1, 0, 0, 0, 926, 927, 6, 103, 11, 0, 927, 928, 6, 103, 12, 0, 928, 218, 1, 0, 0, 0, 929, 930, 3, 97, 43, 0, 930, 931, 1, 0, 0, 0, 931, 932, 6, 104, 15, 0, 932, 220, 1, 0, 0, 0, 933, 934, 3, 99, 44, 0, 934, 935, 1, 0, 0, 0, 935, 936, 6, 105, 14, 0, 936, 222, 1, 0, 0, 0, 937, 938, 3, 103, 46, 0, 938, 939, 1, 0, 0, 0, 939, 940, 6, 106, 17, 0, 940, 224, 1, 0, 0, 0, 941, 942, 5, 97, 0, 0, 942, 943, 5, 115, 0, 0, 943, 226, 1, 0, 0, 0, 944, 945, 3, 165, 77, 0, 945, 946, 1, 0, 0, 0, 946, 947, 6, 108, 16, 0, 947, 228, 1, 0, 0, 0, 948, 949, 3, 205, 97, 0, 949, 950, 1, 0, 0, 0, 950, 951, 6, 109, 18, 0, 951, 230, 1, 0, 0, 0, 952, 953, 3, 47, 18, 0, 953, 954, 1, 0, 0, 0, 954, 955, 6, 110, 8, 0, 955, 232, 1, 0, 0, 0, 956, 957, 3, 49, 19, 0, 957, 958, 1, 0, 0, 0, 958, 959, 6, 111, 8, 0, 959, 234, 1, 0, 0, 0, 960, 961, 3, 51, 20, 0, 961, 962, 1, 0, 0, 0, 962, 963, 6, 112, 8, 0, 963, 236, 1, 0, 0, 0, 964, 965, 3, 63, 26, 0, 965, 966, 1, 0, 0, 0, 966, 967, 6, 113, 11, 0, 967, 968, 6, 113, 12, 0, 968, 238, 1, 0, 0, 0, 969, 970, 3, 159, 74, 0, 970, 971, 1, 0, 0, 0, 971, 972, 6, 114, 9, 0, 972, 973, 6, 114, 19, 0, 973, 240, 1, 0, 0, 0, 974, 975, 5, 111, 0, 0, 975, 976, 5, 110, 0, 0, 976, 977, 1, 0, 0, 0, 977, 978, 6, 115, 20, 0, 978, 242, 1, 0, 0, 0, 979, 980, 5, 119, 0, 0, 980, 981, 5, 105, 0, 0, 981, 982, 5, 116, 0, 0, 982, 983, 5, 104, 0, 0, 983, 984, 1, 0, 0, 0, 984, 985, 6, 116, 20, 0, 985, 244, 1, 0, 0, 0, 986, 987, 8, 12, 0, 0, 987, 246, 1, 0, 0, 0, 988, 991, 3, 67, 28, 0, 989, 991, 3, 65, 27, 0, 990, 988, 1, 0, 0, 0, 990, 989, 1, 0, 0, 0, 991, 995, 1, 0, 0, 0, 992, 994, 
3, 245, 117, 0, 993, 992, 1, 0, 0, 0, 994, 997, 1, 0, 0, 0, 995, 993, 1, 0, 0, 0, 995, 996, 1, 0, 0, 0, 996, 248, 1, 0, 0, 0, 997, 995, 1, 0, 0, 0, 998, 999, 3, 165, 77, 0, 999, 1000, 1, 0, 0, 0, 1000, 1001, 6, 119, 16, 0, 1001, 250, 1, 0, 0, 0, 1002, 1003, 3, 247, 118, 0, 1003, 1004, 1, 0, 0, 0, 1004, 1005, 6, 120, 21, 0, 1005, 252, 1, 0, 0, 0, 1006, 1007, 3, 47, 18, 0, 1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 121, 8, 0, 1009, 254, 1, 0, 0, 0, 1010, 1011, 3, 49, 19, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 122, 8, 0, 1013, 256, 1, 0, 0, 0, 1014, 1015, 3, 51, 20, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 123, 8, 0, 1017, 258, 1, 0, 0, 0, 1018, 1019, 3, 63, 26, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 124, 11, 0, 1021, 1022, 6, 124, 12, 0, 1022, 1023, 6, 124, 12, 0, 1023, 260, 1, 0, 0, 0, 1024, 1025, 3, 97, 43, 0, 1025, 1026, 1, 0, 0, 0, 1026, 1027, 6, 125, 15, 0, 1027, 262, 1, 0, 0, 0, 1028, 1029, 3, 99, 44, 0, 1029, 1030, 1, 0, 0, 0, 1030, 1031, 6, 126, 14, 0, 1031, 264, 1, 0, 0, 0, 1032, 1033, 3, 103, 46, 0, 1033, 1034, 1, 0, 0, 0, 1034, 1035, 6, 127, 17, 0, 1035, 266, 1, 0, 0, 0, 1036, 1037, 3, 243, 116, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1039, 6, 128, 22, 0, 1039, 268, 1, 0, 0, 0, 1040, 1041, 3, 205, 97, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 6, 129, 18, 0, 1043, 270, 1, 0, 0, 0, 1044, 1045, 3, 165, 77, 0, 1045, 1046, 1, 0, 0, 0, 1046, 1047, 6, 130, 16, 0, 1047, 272, 1, 0, 0, 0, 1048, 1049, 3, 47, 18, 0, 1049, 1050, 1, 0, 0, 0, 1050, 1051, 6, 131, 8, 0, 1051, 274, 1, 0, 0, 0, 1052, 1053, 3, 49, 19, 0, 1053, 1054, 1, 0, 0, 0, 1054, 1055, 6, 132, 8, 0, 1055, 276, 1, 0, 0, 0, 1056, 1057, 3, 51, 20, 0, 1057, 1058, 1, 0, 0, 0, 1058, 1059, 6, 133, 8, 0, 1059, 278, 1, 0, 0, 0, 1060, 1061, 3, 63, 26, 0, 1061, 1062, 1, 0, 0, 0, 1062, 1063, 6, 134, 11, 0, 1063, 1064, 6, 134, 12, 0, 1064, 280, 1, 0, 0, 0, 1065, 1066, 3, 103, 46, 0, 1066, 1067, 1, 0, 0, 0, 1067, 1068, 6, 135, 17, 0, 1068, 282, 1, 0, 0, 0, 1069, 1070, 3, 165, 77, 0, 1070, 1071, 1, 0, 0, 0, 1071, 1072, 6, 136, 16, 0, 1072, 284, 1, 0, 0, 0, 1073, 1074, 3, 163, 76, 0, 1074, 1075, 1, 0, 0, 0, 1075, 1076, 6, 137, 23, 0, 1076, 286, 1, 0, 0, 0, 1077, 1078, 3, 47, 18, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1080, 6, 138, 8, 0, 1080, 288, 1, 0, 0, 0, 1081, 1082, 3, 49, 19, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 139, 8, 0, 1084, 290, 1, 0, 0, 0, 1085, 1086, 3, 51, 20, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 140, 8, 0, 1088, 292, 1, 0, 0, 0, 1089, 1090, 3, 63, 26, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 141, 11, 0, 1092, 1093, 6, 141, 12, 0, 1093, 294, 1, 0, 0, 0, 1094, 1095, 5, 105, 0, 0, 1095, 1096, 5, 110, 0, 0, 1096, 1097, 5, 102, 0, 0, 1097, 1098, 5, 111, 0, 0, 1098, 296, 1, 0, 0, 0, 1099, 1100, 5, 102, 0, 0, 1100, 1101, 5, 117, 0, 0, 1101, 1102, 5, 110, 0, 0, 1102, 1103, 5, 99, 0, 0, 1103, 1104, 5, 116, 0, 0, 1104, 1105, 5, 105, 0, 0, 1105, 1106, 5, 111, 0, 0, 1106, 1107, 5, 110, 0, 0, 1107, 1108, 5, 115, 0, 0, 1108, 298, 1, 0, 0, 0, 1109, 1110, 3, 47, 18, 0, 1110, 1111, 1, 0, 0, 0, 1111, 1112, 6, 144, 8, 0, 1112, 300, 1, 0, 0, 0, 1113, 1114, 3, 49, 19, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 145, 8, 0, 1116, 302, 1, 0, 0, 0, 1117, 1118, 3, 51, 20, 0, 1118, 1119, 1, 0, 0, 0, 1119, 1120, 6, 146, 8, 0, 1120, 304, 1, 0, 0, 0, 1121, 1122, 3, 161, 75, 0, 1122, 1123, 1, 0, 0, 0, 1123, 1124, 6, 147, 13, 0, 1124, 1125, 6, 147, 12, 0, 1125, 306, 1, 0, 0, 0, 1126, 1127, 5, 58, 0, 0, 1127, 308, 1, 0, 0, 0, 1128, 1134, 3, 75, 32, 0, 1129, 1134, 3, 65, 27, 0, 1130, 1134, 3, 103, 46, 0, 1131, 1134, 3, 67, 28, 0, 1132, 1134, 3, 81, 35, 0, 1133, 1128, 
1, 0, 0, 0, 1133, 1129, 1, 0, 0, 0, 1133, 1130, 1, 0, 0, 0, 1133, 1131, 1, 0, 0, 0, 1133, 1132, 1, 0, 0, 0, 1134, 1135, 1, 0, 0, 0, 1135, 1133, 1, 0, 0, 0, 1135, 1136, 1, 0, 0, 0, 1136, 310, 1, 0, 0, 0, 1137, 1138, 3, 47, 18, 0, 1138, 1139, 1, 0, 0, 0, 1139, 1140, 6, 150, 8, 0, 1140, 312, 1, 0, 0, 0, 1141, 1142, 3, 49, 19, 0, 1142, 1143, 1, 0, 0, 0, 1143, 1144, 6, 151, 8, 0, 1144, 314, 1, 0, 0, 0, 1145, 1146, 3, 51, 20, 0, 1146, 1147, 1, 0, 0, 0, 1147, 1148, 6, 152, 8, 0, 1148, 316, 1, 0, 0, 0, 54, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 463, 473, 477, 480, 489, 491, 502, 543, 548, 557, 564, 569, 571, 582, 590, 593, 595, 600, 605, 611, 618, 623, 629, 632, 640, 644, 774, 779, 784, 786, 792, 841, 846, 881, 885, 890, 895, 900, 902, 990, 995, 1133, 1135, 24, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 63, 0, 5, 0, 0, 7, 25, 0, 4, 0, 0, 7, 64, 0, 7, 33, 0, 7, 32, 0, 7, 66, 0, 7, 35, 0, 7, 75, 0, 5, 10, 0, 5, 7, 0, 7, 85, 0, 7, 84, 0, 7, 65, 0]
\ No newline at end of file
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
index 7aa02a707e03e..d6b5b21bca133 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java
@@ -18,25 +18,25 @@ public class EsqlBaseLexer extends Lexer {
new PredictionContextCache();
public static final int
DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, INLINESTATS=8,
- KEEP=9, LIMIT=10, MV_EXPAND=11, PROJECT=12, RENAME=13, ROW=14, SHOW=15,
- SORT=16, STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21,
- WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25,
- PIPE=26, STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, AND=31,
- ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, LAST=39,
- LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48,
- RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57,
- GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, OPENING_BRACKET=64,
- CLOSING_BRACKET=65, UNQUOTED_IDENTIFIER=66, QUOTED_IDENTIFIER=67, EXPR_LINE_COMMENT=68,
- EXPR_MULTILINE_COMMENT=69, EXPR_WS=70, METADATA=71, FROM_UNQUOTED_IDENTIFIER=72,
- FROM_LINE_COMMENT=73, FROM_MULTILINE_COMMENT=74, FROM_WS=75, UNQUOTED_ID_PATTERN=76,
- PROJECT_LINE_COMMENT=77, PROJECT_MULTILINE_COMMENT=78, PROJECT_WS=79,
- AS=80, RENAME_LINE_COMMENT=81, RENAME_MULTILINE_COMMENT=82, RENAME_WS=83,
- ON=84, WITH=85, ENRICH_POLICY_NAME=86, ENRICH_LINE_COMMENT=87, ENRICH_MULTILINE_COMMENT=88,
- ENRICH_WS=89, ENRICH_FIELD_LINE_COMMENT=90, ENRICH_FIELD_MULTILINE_COMMENT=91,
- ENRICH_FIELD_WS=92, MVEXPAND_LINE_COMMENT=93, MVEXPAND_MULTILINE_COMMENT=94,
- MVEXPAND_WS=95, INFO=96, FUNCTIONS=97, SHOW_LINE_COMMENT=98, SHOW_MULTILINE_COMMENT=99,
- SHOW_WS=100, COLON=101, SETTING=102, SETTING_LINE_COMMENT=103, SETTTING_MULTILINE_COMMENT=104,
- SETTING_WS=105;
+ KEEP=9, LIMIT=10, MV_EXPAND=11, RENAME=12, ROW=13, SHOW=14, SORT=15, STATS=16,
+ WHERE=17, UNKNOWN_CMD=18, LINE_COMMENT=19, MULTILINE_COMMENT=20, WS=21,
+ EXPLAIN_WS=22, EXPLAIN_LINE_COMMENT=23, EXPLAIN_MULTILINE_COMMENT=24,
+ PIPE=25, STRING=26, INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30,
+ ASC=31, ASSIGN=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, LAST=38,
+ LP=39, IN=40, IS=41, LIKE=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47,
+ RLIKE=48, RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56,
+ GTE=57, PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, OPENING_BRACKET=63,
+ CLOSING_BRACKET=64, UNQUOTED_IDENTIFIER=65, QUOTED_IDENTIFIER=66, EXPR_LINE_COMMENT=67,
+ EXPR_MULTILINE_COMMENT=68, EXPR_WS=69, METADATA=70, FROM_UNQUOTED_IDENTIFIER=71,
+ FROM_LINE_COMMENT=72, FROM_MULTILINE_COMMENT=73, FROM_WS=74, UNQUOTED_ID_PATTERN=75,
+ PROJECT_LINE_COMMENT=76, PROJECT_MULTILINE_COMMENT=77, PROJECT_WS=78,
+ AS=79, RENAME_LINE_COMMENT=80, RENAME_MULTILINE_COMMENT=81, RENAME_WS=82,
+ ON=83, WITH=84, ENRICH_POLICY_NAME=85, ENRICH_LINE_COMMENT=86, ENRICH_MULTILINE_COMMENT=87,
+ ENRICH_WS=88, ENRICH_FIELD_LINE_COMMENT=89, ENRICH_FIELD_MULTILINE_COMMENT=90,
+ ENRICH_FIELD_WS=91, MVEXPAND_LINE_COMMENT=92, MVEXPAND_MULTILINE_COMMENT=93,
+ MVEXPAND_WS=94, INFO=95, FUNCTIONS=96, SHOW_LINE_COMMENT=97, SHOW_MULTILINE_COMMENT=98,
+ SHOW_WS=99, COLON=100, SETTING=101, SETTING_LINE_COMMENT=102, SETTTING_MULTILINE_COMMENT=103,
+ SETTING_WS=104;
public static final int
EXPLAIN_MODE=1, EXPRESSION_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5,
ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10;
@@ -53,36 +53,36 @@ public class EsqlBaseLexer extends Lexer {
private static String[] makeRuleNames() {
return new String[] {
"DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", "INLINESTATS",
- "KEEP", "LIMIT", "MV_EXPAND", "PROJECT", "RENAME", "ROW", "SHOW", "SORT",
- "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT",
- "WS", "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT",
- "EXPLAIN_MULTILINE_COMMENT", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE",
- "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK",
- "UNDERSCORE", "UNQUOTED_ID_BODY", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL",
- "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST",
- "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM",
- "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE",
- "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET",
- "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT",
- "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET",
- "FROM_COMMA", "FROM_ASSIGN", "METADATA", "FROM_UNQUOTED_IDENTIFIER_PART",
- "FROM_UNQUOTED_IDENTIFIER", "FROM_QUOTED_IDENTIFIER", "FROM_LINE_COMMENT",
- "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA",
- "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "PROJECT_UNQUOTED_IDENTIFIER",
- "PROJECT_QUOTED_IDENTIFIER", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT",
- "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT",
- "AS", "RENAME_QUOTED_IDENTIFIER", "RENAME_UNQUOTED_IDENTIFIER", "RENAME_LINE_COMMENT",
- "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ENRICH_PIPE", "ENRICH_OPENING_BRACKET",
- "ON", "WITH", "ENRICH_POLICY_NAME_BODY", "ENRICH_POLICY_NAME", "ENRICH_QUOTED_IDENTIFIER",
- "ENRICH_MODE_UNQUOTED_VALUE", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT",
- "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA",
- "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_UNQUOTED_IDENTIFIER",
- "ENRICH_FIELD_QUOTED_IDENTIFIER", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT",
- "ENRICH_FIELD_WS", "MVEXPAND_PIPE", "MVEXPAND_DOT", "MVEXPAND_QUOTED_IDENTIFIER",
- "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT",
- "MVEXPAND_WS", "SHOW_PIPE", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT",
- "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING_CLOSING_BRACKET", "COLON",
- "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", "SETTING_WS"
+ "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS",
+ "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "EXPLAIN_OPENING_BRACKET",
+ "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT",
+ "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT",
+ "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY",
+ "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN",
+ "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE",
+ "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ",
+ "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH",
+ "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER",
+ "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS",
+ "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COMMA",
+ "FROM_ASSIGN", "METADATA", "FROM_UNQUOTED_IDENTIFIER_PART", "FROM_UNQUOTED_IDENTIFIER",
+ "FROM_QUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT",
+ "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "UNQUOTED_ID_BODY_WITH_PATTERN",
+ "UNQUOTED_ID_PATTERN", "PROJECT_UNQUOTED_IDENTIFIER", "PROJECT_QUOTED_IDENTIFIER",
+ "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "RENAME_PIPE",
+ "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "AS", "RENAME_QUOTED_IDENTIFIER",
+ "RENAME_UNQUOTED_IDENTIFIER", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT",
+ "RENAME_WS", "ENRICH_PIPE", "ENRICH_OPENING_BRACKET", "ON", "WITH", "ENRICH_POLICY_NAME_BODY",
+ "ENRICH_POLICY_NAME", "ENRICH_QUOTED_IDENTIFIER", "ENRICH_MODE_UNQUOTED_VALUE",
+ "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_PIPE",
+ "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH",
+ "ENRICH_FIELD_UNQUOTED_IDENTIFIER", "ENRICH_FIELD_QUOTED_IDENTIFIER",
+ "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS",
+ "MVEXPAND_PIPE", "MVEXPAND_DOT", "MVEXPAND_QUOTED_IDENTIFIER", "MVEXPAND_UNQUOTED_IDENTIFIER",
+ "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS",
+ "SHOW_PIPE", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT",
+ "SHOW_WS", "SETTING_CLOSING_BRACKET", "COLON", "SETTING", "SETTING_LINE_COMMENT",
+ "SETTTING_MULTILINE_COMMENT", "SETTING_WS"
};
}
public static final String[] ruleNames = makeRuleNames();
@@ -90,25 +90,25 @@ private static String[] makeRuleNames() {
private static String[] makeLiteralNames() {
return new String[] {
null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'",
- "'grok'", "'inlinestats'", "'keep'", "'limit'", "'mv_expand'", "'project'",
- "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null,
- null, null, null, null, null, null, "'|'", null, null, null, "'by'",
- "'and'", "'asc'", "'='", "','", "'desc'", "'.'", "'false'", "'first'",
- "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'",
- "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'",
- "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'",
- null, null, null, null, null, "'metadata'", null, null, null, null, null,
- null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null,
- null, null, null, null, null, null, null, null, "'info'", "'functions'",
- null, null, null, "':'"
+ "'grok'", "'inlinestats'", "'keep'", "'limit'", "'mv_expand'", "'rename'",
+ "'row'", "'show'", "'sort'", "'stats'", "'where'", null, null, null,
+ null, null, null, null, "'|'", null, null, null, "'by'", "'and'", "'asc'",
+ "'='", "','", "'desc'", "'.'", "'false'", "'first'", "'last'", "'('",
+ "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", "'or'", "'?'",
+ "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'",
+ "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", null, null, null,
+ null, null, "'metadata'", null, null, null, null, null, null, null, null,
+ "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null,
+ null, null, null, null, null, "'info'", "'functions'", null, null, null,
+ "':'"
};
}
private static final String[] _LITERAL_NAMES = makeLiteralNames();
private static String[] makeSymbolicNames() {
return new String[] {
null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK",
- "INLINESTATS", "KEEP", "LIMIT", "MV_EXPAND", "PROJECT", "RENAME", "ROW",
- "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT",
+ "INLINESTATS", "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW",
+ "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT",
"WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT",
"PIPE", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND",
"ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP",
@@ -187,7 +187,7 @@ public EsqlBaseLexer(CharStream input) {
public ATN getATN() { return _ATN; }
public static final String _serializedATN =
- "\u0004\u0000i\u0489\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+
+ "\u0004\u0000h\u047d\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+
"\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+
"\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+
"\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002"+
@@ -227,694 +227,688 @@ public EsqlBaseLexer(CharStream input) {
"\u008d\u0007\u008d\u0002\u008e\u0007\u008e\u0002\u008f\u0007\u008f\u0002"+
"\u0090\u0007\u0090\u0002\u0091\u0007\u0091\u0002\u0092\u0007\u0092\u0002"+
"\u0093\u0007\u0093\u0002\u0094\u0007\u0094\u0002\u0095\u0007\u0095\u0002"+
- "\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098\u0002"+
- "\u0099\u0007\u0099\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+
"\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+
- "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+
- "\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+
- "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001"+
- "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001"+
+ "\u0096\u0007\u0096\u0002\u0097\u0007\u0097\u0002\u0098\u0007\u0098\u0001"+
"\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+
+ "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001"+
+ "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001"+
+ "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+
+ "\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+
"\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t"+ - "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001"+ - "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0004\u0012\u01da\b\u0012"+ - "\u000b\u0012\f\u0012\u01db\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013"+ - "\u0001\u0013\u0001\u0013\u0005\u0013\u01e4\b\u0013\n\u0013\f\u0013\u01e7"+ - "\t\u0013\u0001\u0013\u0003\u0013\u01ea\b\u0013\u0001\u0013\u0003\u0013"+ - "\u01ed\b\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014"+ - "\u0001\u0014\u0001\u0014\u0005\u0014\u01f6\b\u0014\n\u0014\f\u0014\u01f9"+ - "\t\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0015\u0004\u0015\u0201\b\u0015\u000b\u0015\f\u0015\u0202\u0001\u0015"+ - "\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+ - "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ + "\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e"+ + "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ + "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0004\u0011"+ + "\u01ce\b\u0011\u000b\u0011\f\u0011\u01cf\u0001\u0011\u0001\u0011\u0001"+ + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0005\u0012\u01d8\b\u0012\n"+ + "\u0012\f\u0012\u01db\t\u0012\u0001\u0012\u0003\u0012\u01de\b\u0012\u0001"+ + 
"\u0012\u0003\u0012\u01e1\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001"+ + "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u01ea\b\u0013\n"+ + "\u0013\f\u0013\u01ed\t\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ + "\u0013\u0001\u0013\u0001\u0014\u0004\u0014\u01f5\b\u0014\u000b\u0014\f"+ + "\u0014\u01f6\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015"+ + "\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+ + "\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018"+ "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019"+ "\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b"+ - "\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d"+ - "\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f"+ - "\u0001 \u0001 \u0003 \u022c\b \u0001 \u0004 \u022f\b \u000b \f \u0230"+ - "\u0001!\u0001!\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u023a\b#\u0001"+ - "$\u0001$\u0001%\u0001%\u0001%\u0003%\u0241\b%\u0001&\u0001&\u0001&\u0005"+ - "&\u0246\b&\n&\f&\u0249\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005"+ - "&\u0251\b&\n&\f&\u0254\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u025b"+ - "\b&\u0001&\u0003&\u025e\b&\u0003&\u0260\b&\u0001\'\u0004\'\u0263\b\'\u000b"+ - "\'\f\'\u0264\u0001(\u0004(\u0268\b(\u000b(\f(\u0269\u0001(\u0001(\u0005"+ - "(\u026e\b(\n(\f(\u0271\t(\u0001(\u0001(\u0004(\u0275\b(\u000b(\f(\u0276"+ - "\u0001(\u0004(\u027a\b(\u000b(\f(\u027b\u0001(\u0001(\u0005(\u0280\b("+ - "\n(\f(\u0283\t(\u0003(\u0285\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u028b"+ - "\b(\u000b(\f(\u028c\u0001(\u0001(\u0003(\u0291\b(\u0001)\u0001)\u0001"+ - ")\u0001*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001"+ - ",\u0001-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001"+ - "0\u00010\u00010\u00010\u00010\u00010\u00011\u00011\u00011\u00011\u0001"+ - "1\u00011\u00012\u00012\u00012\u00012\u00012\u00013\u00013\u00014\u0001"+ - "4\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u00016\u0001"+ - "7\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00018\u00019\u0001"+ - "9\u00019\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001;\u0001;\u0001"+ - "<\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001>\u0001>\u0001"+ - ">\u0001>\u0001>\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001"+ - "A\u0001A\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001E\u0001"+ - "E\u0001E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001"+ - "J\u0001J\u0001K\u0001K\u0001K\u0001K\u0001K\u0001L\u0001L\u0001L\u0001"+ - "L\u0001L\u0001M\u0001M\u0005M\u0311\bM\nM\fM\u0314\tM\u0001M\u0001M\u0003"+ - "M\u0318\bM\u0001M\u0004M\u031b\bM\u000bM\fM\u031c\u0003M\u031f\bM\u0001"+ - "N\u0001N\u0004N\u0323\bN\u000bN\fN\u0324\u0001N\u0001N\u0001O\u0001O\u0001"+ - "O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001"+ - "R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001T\u0001"+ - "T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001"+ - "V\u0001W\u0001W\u0001W\u0001W\u0001W\u0001W\u0001W\u0001W\u0001W\u0001"+ - "X\u0001X\u0001X\u0003X\u0356\bX\u0001Y\u0004Y\u0359\bY\u000bY\fY\u035a"+ - "\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001"+ - "\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001"+ - "^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ - "a\u0001a\u0001a\u0001a\u0003a\u037e\ba\u0001b\u0001b\u0003b\u0382\bb\u0001"+ - 
"b\u0005b\u0385\bb\nb\fb\u0388\tb\u0001b\u0001b\u0003b\u038c\bb\u0001b"+ - "\u0004b\u038f\bb\u000bb\fb\u0390\u0003b\u0393\bb\u0001c\u0001c\u0001c"+ - "\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001e\u0001"+ - "f\u0001f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001"+ - "h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001"+ - "j\u0001k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001m\u0001m\u0001"+ - "m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001"+ - "p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001"+ - "r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001"+ - "t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001u\u0001u\u0001u\u0001"+ - "v\u0001v\u0001w\u0001w\u0003w\u03eb\bw\u0001w\u0005w\u03ee\bw\nw\fw\u03f1"+ - "\tw\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001"+ - "z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001"+ - "|\u0001}\u0001}\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001"+ - "~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001\u0080"+ - "\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0081"+ - "\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083"+ - "\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084"+ - "\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001\u0086"+ - "\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087"+ - "\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089"+ - "\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a"+ - "\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c"+ - "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d"+ - "\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e"+ - "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090"+ - "\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090"+ - "\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091"+ - "\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093"+ - "\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094"+ - "\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0096\u0001\u0096"+ - "\u0001\u0096\u0001\u0096\u0001\u0096\u0004\u0096\u047a\b\u0096\u000b\u0096"+ - "\f\u0096\u047b\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0097\u0001\u0098"+ - "\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099"+ - "\u0001\u0099\u0002\u01f7\u0252\u0000\u009a\u000b\u0001\r\u0002\u000f\u0003"+ - "\u0011\u0004\u0013\u0005\u0015\u0006\u0017\u0007\u0019\b\u001b\t\u001d"+ - "\n\u001f\u000b!\f#\r%\u000e\'\u000f)\u0010+\u0011-\u0012/\u00131\u0014"+ - "3\u00155\u00167\u00009\u0000;\u0017=\u0018?\u0019A\u001aC\u0000E\u0000"+ - "G\u0000I\u0000K\u0000M\u0000O\u0000Q\u0000S\u0000U\u0000W\u001bY\u001c"+ - "[\u001d]\u001e_\u001fa c!e\"g#i$k%m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u0083"+ - "1\u00852\u00873\u00894\u008b5\u008d6\u008f7\u00918\u00939\u0095:\u0097"+ - ";\u0099<\u009b=\u009d>\u009f?\u00a1@\u00a3A\u00a5B\u00a7C\u00a9D\u00ab"+ - "E\u00adF\u00af\u0000\u00b1\u0000\u00b3\u0000\u00b5\u0000\u00b7\u0000\u00b9"+ - "G\u00bb\u0000\u00bdH\u00bf\u0000\u00c1I\u00c3J\u00c5K\u00c7\u0000\u00c9"+ - "\u0000\u00cb\u0000\u00cd\u0000\u00cfL\u00d1\u0000\u00d3\u0000\u00d5M\u00d7"+ - 
"N\u00d9O\u00db\u0000\u00dd\u0000\u00df\u0000\u00e1\u0000\u00e3P\u00e5"+ - "\u0000\u00e7\u0000\u00e9Q\u00ebR\u00edS\u00ef\u0000\u00f1\u0000\u00f3"+ - "T\u00f5U\u00f7\u0000\u00f9V\u00fb\u0000\u00fd\u0000\u00ffW\u0101X\u0103"+ - "Y\u0105\u0000\u0107\u0000\u0109\u0000\u010b\u0000\u010d\u0000\u010f\u0000"+ - "\u0111\u0000\u0113Z\u0115[\u0117\\\u0119\u0000\u011b\u0000\u011d\u0000"+ - "\u011f\u0000\u0121]\u0123^\u0125_\u0127\u0000\u0129`\u012ba\u012db\u012f"+ - "c\u0131d\u0133\u0000\u0135e\u0137f\u0139g\u013bh\u013di\u000b\u0000\u0001"+ - "\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\r\u0006\u0000\t\n\r\r //["+ - "[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000"+ - "AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000"+ - "EEee\u0002\u0000++--\u0001\u0000``\n\u0000\t\n\r\r ,,//==[[]]``||\u0002"+ - "\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u04a4\u0000\u000b"+ - "\u0001\u0000\u0000\u0000\u0000\r\u0001\u0000\u0000\u0000\u0000\u000f\u0001"+ - "\u0000\u0000\u0000\u0000\u0011\u0001\u0000\u0000\u0000\u0000\u0013\u0001"+ - "\u0000\u0000\u0000\u0000\u0015\u0001\u0000\u0000\u0000\u0000\u0017\u0001"+ - "\u0000\u0000\u0000\u0000\u0019\u0001\u0000\u0000\u0000\u0000\u001b\u0001"+ - "\u0000\u0000\u0000\u0000\u001d\u0001\u0000\u0000\u0000\u0000\u001f\u0001"+ - "\u0000\u0000\u0000\u0000!\u0001\u0000\u0000\u0000\u0000#\u0001\u0000\u0000"+ - "\u0000\u0000%\u0001\u0000\u0000\u0000\u0000\'\u0001\u0000\u0000\u0000"+ - "\u0000)\u0001\u0000\u0000\u0000\u0000+\u0001\u0000\u0000\u0000\u0000-"+ - "\u0001\u0000\u0000\u0000\u0000/\u0001\u0000\u0000\u0000\u00001\u0001\u0000"+ - "\u0000\u0000\u00003\u0001\u0000\u0000\u0000\u00005\u0001\u0000\u0000\u0000"+ - "\u00017\u0001\u0000\u0000\u0000\u00019\u0001\u0000\u0000\u0000\u0001;"+ - "\u0001\u0000\u0000\u0000\u0001=\u0001\u0000\u0000\u0000\u0001?\u0001\u0000"+ - "\u0000\u0000\u0002A\u0001\u0000\u0000\u0000\u0002W\u0001\u0000\u0000\u0000"+ - "\u0002Y\u0001\u0000\u0000\u0000\u0002[\u0001\u0000\u0000\u0000\u0002]"+ - "\u0001\u0000\u0000\u0000\u0002_\u0001\u0000\u0000\u0000\u0002a\u0001\u0000"+ - "\u0000\u0000\u0002c\u0001\u0000\u0000\u0000\u0002e\u0001\u0000\u0000\u0000"+ - "\u0002g\u0001\u0000\u0000\u0000\u0002i\u0001\u0000\u0000\u0000\u0002k"+ - "\u0001\u0000\u0000\u0000\u0002m\u0001\u0000\u0000\u0000\u0002o\u0001\u0000"+ - "\u0000\u0000\u0002q\u0001\u0000\u0000\u0000\u0002s\u0001\u0000\u0000\u0000"+ - "\u0002u\u0001\u0000\u0000\u0000\u0002w\u0001\u0000\u0000\u0000\u0002y"+ - "\u0001\u0000\u0000\u0000\u0002{\u0001\u0000\u0000\u0000\u0002}\u0001\u0000"+ - "\u0000\u0000\u0002\u007f\u0001\u0000\u0000\u0000\u0002\u0081\u0001\u0000"+ - "\u0000\u0000\u0002\u0083\u0001\u0000\u0000\u0000\u0002\u0085\u0001\u0000"+ - "\u0000\u0000\u0002\u0087\u0001\u0000\u0000\u0000\u0002\u0089\u0001\u0000"+ - "\u0000\u0000\u0002\u008b\u0001\u0000\u0000\u0000\u0002\u008d\u0001\u0000"+ - "\u0000\u0000\u0002\u008f\u0001\u0000\u0000\u0000\u0002\u0091\u0001\u0000"+ - "\u0000\u0000\u0002\u0093\u0001\u0000\u0000\u0000\u0002\u0095\u0001\u0000"+ - "\u0000\u0000\u0002\u0097\u0001\u0000\u0000\u0000\u0002\u0099\u0001\u0000"+ - "\u0000\u0000\u0002\u009b\u0001\u0000\u0000\u0000\u0002\u009d\u0001\u0000"+ - "\u0000\u0000\u0002\u009f\u0001\u0000\u0000\u0000\u0002\u00a1\u0001\u0000"+ - "\u0000\u0000\u0002\u00a3\u0001\u0000\u0000\u0000\u0002\u00a5\u0001\u0000"+ - "\u0000\u0000\u0002\u00a7\u0001\u0000\u0000\u0000\u0002\u00a9\u0001\u0000"+ - "\u0000\u0000\u0002\u00ab\u0001\u0000\u0000\u0000\u0002\u00ad\u0001\u0000"+ - 
"\u0000\u0000\u0003\u00af\u0001\u0000\u0000\u0000\u0003\u00b1\u0001\u0000"+ - "\u0000\u0000\u0003\u00b3\u0001\u0000\u0000\u0000\u0003\u00b5\u0001\u0000"+ - "\u0000\u0000\u0003\u00b7\u0001\u0000\u0000\u0000\u0003\u00b9\u0001\u0000"+ - "\u0000\u0000\u0003\u00bd\u0001\u0000\u0000\u0000\u0003\u00bf\u0001\u0000"+ - "\u0000\u0000\u0003\u00c1\u0001\u0000\u0000\u0000\u0003\u00c3\u0001\u0000"+ - "\u0000\u0000\u0003\u00c5\u0001\u0000\u0000\u0000\u0004\u00c7\u0001\u0000"+ - "\u0000\u0000\u0004\u00c9\u0001\u0000\u0000\u0000\u0004\u00cb\u0001\u0000"+ - "\u0000\u0000\u0004\u00cf\u0001\u0000\u0000\u0000\u0004\u00d1\u0001\u0000"+ - "\u0000\u0000\u0004\u00d3\u0001\u0000\u0000\u0000\u0004\u00d5\u0001\u0000"+ - "\u0000\u0000\u0004\u00d7\u0001\u0000\u0000\u0000\u0004\u00d9\u0001\u0000"+ - "\u0000\u0000\u0005\u00db\u0001\u0000\u0000\u0000\u0005\u00dd\u0001\u0000"+ - "\u0000\u0000\u0005\u00df\u0001\u0000\u0000\u0000\u0005\u00e1\u0001\u0000"+ - "\u0000\u0000\u0005\u00e3\u0001\u0000\u0000\u0000\u0005\u00e5\u0001\u0000"+ - "\u0000\u0000\u0005\u00e7\u0001\u0000\u0000\u0000\u0005\u00e9\u0001\u0000"+ - "\u0000\u0000\u0005\u00eb\u0001\u0000\u0000\u0000\u0005\u00ed\u0001\u0000"+ - "\u0000\u0000\u0006\u00ef\u0001\u0000\u0000\u0000\u0006\u00f1\u0001\u0000"+ - "\u0000\u0000\u0006\u00f3\u0001\u0000\u0000\u0000\u0006\u00f5\u0001\u0000"+ - "\u0000\u0000\u0006\u00f9\u0001\u0000\u0000\u0000\u0006\u00fb\u0001\u0000"+ - "\u0000\u0000\u0006\u00fd\u0001\u0000\u0000\u0000\u0006\u00ff\u0001\u0000"+ - "\u0000\u0000\u0006\u0101\u0001\u0000\u0000\u0000\u0006\u0103\u0001\u0000"+ - "\u0000\u0000\u0007\u0105\u0001\u0000\u0000\u0000\u0007\u0107\u0001\u0000"+ - "\u0000\u0000\u0007\u0109\u0001\u0000\u0000\u0000\u0007\u010b\u0001\u0000"+ - "\u0000\u0000\u0007\u010d\u0001\u0000\u0000\u0000\u0007\u010f\u0001\u0000"+ - "\u0000\u0000\u0007\u0111\u0001\u0000\u0000\u0000\u0007\u0113\u0001\u0000"+ - "\u0000\u0000\u0007\u0115\u0001\u0000\u0000\u0000\u0007\u0117\u0001\u0000"+ - "\u0000\u0000\b\u0119\u0001\u0000\u0000\u0000\b\u011b\u0001\u0000\u0000"+ - "\u0000\b\u011d\u0001\u0000\u0000\u0000\b\u011f\u0001\u0000\u0000\u0000"+ - "\b\u0121\u0001\u0000\u0000\u0000\b\u0123\u0001\u0000\u0000\u0000\b\u0125"+ - "\u0001\u0000\u0000\u0000\t\u0127\u0001\u0000\u0000\u0000\t\u0129\u0001"+ - "\u0000\u0000\u0000\t\u012b\u0001\u0000\u0000\u0000\t\u012d\u0001\u0000"+ - "\u0000\u0000\t\u012f\u0001\u0000\u0000\u0000\t\u0131\u0001\u0000\u0000"+ - "\u0000\n\u0133\u0001\u0000\u0000\u0000\n\u0135\u0001\u0000\u0000\u0000"+ - "\n\u0137\u0001\u0000\u0000\u0000\n\u0139\u0001\u0000\u0000\u0000\n\u013b"+ - "\u0001\u0000\u0000\u0000\n\u013d\u0001\u0000\u0000\u0000\u000b\u013f\u0001"+ - "\u0000\u0000\u0000\r\u0149\u0001\u0000\u0000\u0000\u000f\u0150\u0001\u0000"+ - "\u0000\u0000\u0011\u0159\u0001\u0000\u0000\u0000\u0013\u0160\u0001\u0000"+ - "\u0000\u0000\u0015\u016a\u0001\u0000\u0000\u0000\u0017\u0171\u0001\u0000"+ - "\u0000\u0000\u0019\u0178\u0001\u0000\u0000\u0000\u001b\u0186\u0001\u0000"+ - "\u0000\u0000\u001d\u018d\u0001\u0000\u0000\u0000\u001f\u0195\u0001\u0000"+ - "\u0000\u0000!\u01a1\u0001\u0000\u0000\u0000#\u01ab\u0001\u0000\u0000\u0000"+ - "%\u01b4\u0001\u0000\u0000\u0000\'\u01ba\u0001\u0000\u0000\u0000)\u01c1"+ - "\u0001\u0000\u0000\u0000+\u01c8\u0001\u0000\u0000\u0000-\u01d0\u0001\u0000"+ - "\u0000\u0000/\u01d9\u0001\u0000\u0000\u00001\u01df\u0001\u0000\u0000\u0000"+ - "3\u01f0\u0001\u0000\u0000\u00005\u0200\u0001\u0000\u0000\u00007\u0206"+ - "\u0001\u0000\u0000\u00009\u020b\u0001\u0000\u0000\u0000;\u0210\u0001\u0000"+ - 
"\u0000\u0000=\u0214\u0001\u0000\u0000\u0000?\u0218\u0001\u0000\u0000\u0000"+ - "A\u021c\u0001\u0000\u0000\u0000C\u0220\u0001\u0000\u0000\u0000E\u0222"+ - "\u0001\u0000\u0000\u0000G\u0224\u0001\u0000\u0000\u0000I\u0227\u0001\u0000"+ - "\u0000\u0000K\u0229\u0001\u0000\u0000\u0000M\u0232\u0001\u0000\u0000\u0000"+ - "O\u0234\u0001\u0000\u0000\u0000Q\u0239\u0001\u0000\u0000\u0000S\u023b"+ - "\u0001\u0000\u0000\u0000U\u0240\u0001\u0000\u0000\u0000W\u025f\u0001\u0000"+ - "\u0000\u0000Y\u0262\u0001\u0000\u0000\u0000[\u0290\u0001\u0000\u0000\u0000"+ - "]\u0292\u0001\u0000\u0000\u0000_\u0295\u0001\u0000\u0000\u0000a\u0299"+ - "\u0001\u0000\u0000\u0000c\u029d\u0001\u0000\u0000\u0000e\u029f\u0001\u0000"+ - "\u0000\u0000g\u02a1\u0001\u0000\u0000\u0000i\u02a6\u0001\u0000\u0000\u0000"+ - "k\u02a8\u0001\u0000\u0000\u0000m\u02ae\u0001\u0000\u0000\u0000o\u02b4"+ - "\u0001\u0000\u0000\u0000q\u02b9\u0001\u0000\u0000\u0000s\u02bb\u0001\u0000"+ - "\u0000\u0000u\u02be\u0001\u0000\u0000\u0000w\u02c1\u0001\u0000\u0000\u0000"+ - "y\u02c6\u0001\u0000\u0000\u0000{\u02ca\u0001\u0000\u0000\u0000}\u02cf"+ - "\u0001\u0000\u0000\u0000\u007f\u02d5\u0001\u0000\u0000\u0000\u0081\u02d8"+ - "\u0001\u0000\u0000\u0000\u0083\u02da\u0001\u0000\u0000\u0000\u0085\u02e0"+ - "\u0001\u0000\u0000\u0000\u0087\u02e2\u0001\u0000\u0000\u0000\u0089\u02e7"+ - "\u0001\u0000\u0000\u0000\u008b\u02ea\u0001\u0000\u0000\u0000\u008d\u02ed"+ - "\u0001\u0000\u0000\u0000\u008f\u02f0\u0001\u0000\u0000\u0000\u0091\u02f2"+ - "\u0001\u0000\u0000\u0000\u0093\u02f5\u0001\u0000\u0000\u0000\u0095\u02f7"+ - "\u0001\u0000\u0000\u0000\u0097\u02fa\u0001\u0000\u0000\u0000\u0099\u02fc"+ - "\u0001\u0000\u0000\u0000\u009b\u02fe\u0001\u0000\u0000\u0000\u009d\u0300"+ - "\u0001\u0000\u0000\u0000\u009f\u0302\u0001\u0000\u0000\u0000\u00a1\u0304"+ - "\u0001\u0000\u0000\u0000\u00a3\u0309\u0001\u0000\u0000\u0000\u00a5\u031e"+ - "\u0001\u0000\u0000\u0000\u00a7\u0320\u0001\u0000\u0000\u0000\u00a9\u0328"+ - "\u0001\u0000\u0000\u0000\u00ab\u032c\u0001\u0000\u0000\u0000\u00ad\u0330"+ - "\u0001\u0000\u0000\u0000\u00af\u0334\u0001\u0000\u0000\u0000\u00b1\u0339"+ - "\u0001\u0000\u0000\u0000\u00b3\u033d\u0001\u0000\u0000\u0000\u00b5\u0341"+ - "\u0001\u0000\u0000\u0000\u00b7\u0345\u0001\u0000\u0000\u0000\u00b9\u0349"+ - "\u0001\u0000\u0000\u0000\u00bb\u0355\u0001\u0000\u0000\u0000\u00bd\u0358"+ - "\u0001\u0000\u0000\u0000\u00bf\u035c\u0001\u0000\u0000\u0000\u00c1\u0360"+ - "\u0001\u0000\u0000\u0000\u00c3\u0364\u0001\u0000\u0000\u0000\u00c5\u0368"+ - "\u0001\u0000\u0000\u0000\u00c7\u036c\u0001\u0000\u0000\u0000\u00c9\u0371"+ - "\u0001\u0000\u0000\u0000\u00cb\u0375\u0001\u0000\u0000\u0000\u00cd\u037d"+ - "\u0001\u0000\u0000\u0000\u00cf\u0392\u0001\u0000\u0000\u0000\u00d1\u0394"+ - "\u0001\u0000\u0000\u0000\u00d3\u0398\u0001\u0000\u0000\u0000\u00d5\u039c"+ - "\u0001\u0000\u0000\u0000\u00d7\u03a0\u0001\u0000\u0000\u0000\u00d9\u03a4"+ - "\u0001\u0000\u0000\u0000\u00db\u03a8\u0001\u0000\u0000\u0000\u00dd\u03ad"+ - "\u0001\u0000\u0000\u0000\u00df\u03b1\u0001\u0000\u0000\u0000\u00e1\u03b5"+ - "\u0001\u0000\u0000\u0000\u00e3\u03b9\u0001\u0000\u0000\u0000\u00e5\u03bc"+ - "\u0001\u0000\u0000\u0000\u00e7\u03c0\u0001\u0000\u0000\u0000\u00e9\u03c4"+ - "\u0001\u0000\u0000\u0000\u00eb\u03c8\u0001\u0000\u0000\u0000\u00ed\u03cc"+ - "\u0001\u0000\u0000\u0000\u00ef\u03d0\u0001\u0000\u0000\u0000\u00f1\u03d5"+ - "\u0001\u0000\u0000\u0000\u00f3\u03da\u0001\u0000\u0000\u0000\u00f5\u03df"+ - "\u0001\u0000\u0000\u0000\u00f7\u03e6\u0001\u0000\u0000\u0000\u00f9\u03ea"+ - 
"\u0001\u0000\u0000\u0000\u00fb\u03f2\u0001\u0000\u0000\u0000\u00fd\u03f6"+ - "\u0001\u0000\u0000\u0000\u00ff\u03fa\u0001\u0000\u0000\u0000\u0101\u03fe"+ - "\u0001\u0000\u0000\u0000\u0103\u0402\u0001\u0000\u0000\u0000\u0105\u0406"+ - "\u0001\u0000\u0000\u0000\u0107\u040c\u0001\u0000\u0000\u0000\u0109\u0410"+ - "\u0001\u0000\u0000\u0000\u010b\u0414\u0001\u0000\u0000\u0000\u010d\u0418"+ - "\u0001\u0000\u0000\u0000\u010f\u041c\u0001\u0000\u0000\u0000\u0111\u0420"+ - "\u0001\u0000\u0000\u0000\u0113\u0424\u0001\u0000\u0000\u0000\u0115\u0428"+ - "\u0001\u0000\u0000\u0000\u0117\u042c\u0001\u0000\u0000\u0000\u0119\u0430"+ - "\u0001\u0000\u0000\u0000\u011b\u0435\u0001\u0000\u0000\u0000\u011d\u0439"+ - "\u0001\u0000\u0000\u0000\u011f\u043d\u0001\u0000\u0000\u0000\u0121\u0441"+ - "\u0001\u0000\u0000\u0000\u0123\u0445\u0001\u0000\u0000\u0000\u0125\u0449"+ - "\u0001\u0000\u0000\u0000\u0127\u044d\u0001\u0000\u0000\u0000\u0129\u0452"+ - "\u0001\u0000\u0000\u0000\u012b\u0457\u0001\u0000\u0000\u0000\u012d\u0461"+ - "\u0001\u0000\u0000\u0000\u012f\u0465\u0001\u0000\u0000\u0000\u0131\u0469"+ - "\u0001\u0000\u0000\u0000\u0133\u046d\u0001\u0000\u0000\u0000\u0135\u0472"+ - "\u0001\u0000\u0000\u0000\u0137\u0479\u0001\u0000\u0000\u0000\u0139\u047d"+ - "\u0001\u0000\u0000\u0000\u013b\u0481\u0001\u0000\u0000\u0000\u013d\u0485"+ - "\u0001\u0000\u0000\u0000\u013f\u0140\u0005d\u0000\u0000\u0140\u0141\u0005"+ - "i\u0000\u0000\u0141\u0142\u0005s\u0000\u0000\u0142\u0143\u0005s\u0000"+ - "\u0000\u0143\u0144\u0005e\u0000\u0000\u0144\u0145\u0005c\u0000\u0000\u0145"+ - "\u0146\u0005t\u0000\u0000\u0146\u0147\u0001\u0000\u0000\u0000\u0147\u0148"+ - "\u0006\u0000\u0000\u0000\u0148\f\u0001\u0000\u0000\u0000\u0149\u014a\u0005"+ - "d\u0000\u0000\u014a\u014b\u0005r\u0000\u0000\u014b\u014c\u0005o\u0000"+ - "\u0000\u014c\u014d\u0005p\u0000\u0000\u014d\u014e\u0001\u0000\u0000\u0000"+ - "\u014e\u014f\u0006\u0001\u0001\u0000\u014f\u000e\u0001\u0000\u0000\u0000"+ - "\u0150\u0151\u0005e\u0000\u0000\u0151\u0152\u0005n\u0000\u0000\u0152\u0153"+ - "\u0005r\u0000\u0000\u0153\u0154\u0005i\u0000\u0000\u0154\u0155\u0005c"+ - "\u0000\u0000\u0155\u0156\u0005h\u0000\u0000\u0156\u0157\u0001\u0000\u0000"+ - "\u0000\u0157\u0158\u0006\u0002\u0002\u0000\u0158\u0010\u0001\u0000\u0000"+ - "\u0000\u0159\u015a\u0005e\u0000\u0000\u015a\u015b\u0005v\u0000\u0000\u015b"+ - "\u015c\u0005a\u0000\u0000\u015c\u015d\u0005l\u0000\u0000\u015d\u015e\u0001"+ - "\u0000\u0000\u0000\u015e\u015f\u0006\u0003\u0000\u0000\u015f\u0012\u0001"+ - "\u0000\u0000\u0000\u0160\u0161\u0005e\u0000\u0000\u0161\u0162\u0005x\u0000"+ - "\u0000\u0162\u0163\u0005p\u0000\u0000\u0163\u0164\u0005l\u0000\u0000\u0164"+ - "\u0165\u0005a\u0000\u0000\u0165\u0166\u0005i\u0000\u0000\u0166\u0167\u0005"+ - "n\u0000\u0000\u0167\u0168\u0001\u0000\u0000\u0000\u0168\u0169\u0006\u0004"+ - "\u0003\u0000\u0169\u0014\u0001\u0000\u0000\u0000\u016a\u016b\u0005f\u0000"+ - "\u0000\u016b\u016c\u0005r\u0000\u0000\u016c\u016d\u0005o\u0000\u0000\u016d"+ - "\u016e\u0005m\u0000\u0000\u016e\u016f\u0001\u0000\u0000\u0000\u016f\u0170"+ - "\u0006\u0005\u0004\u0000\u0170\u0016\u0001\u0000\u0000\u0000\u0171\u0172"+ - "\u0005g\u0000\u0000\u0172\u0173\u0005r\u0000\u0000\u0173\u0174\u0005o"+ - "\u0000\u0000\u0174\u0175\u0005k\u0000\u0000\u0175\u0176\u0001\u0000\u0000"+ - "\u0000\u0176\u0177\u0006\u0006\u0000\u0000\u0177\u0018\u0001\u0000\u0000"+ - "\u0000\u0178\u0179\u0005i\u0000\u0000\u0179\u017a\u0005n\u0000\u0000\u017a"+ - "\u017b\u0005l\u0000\u0000\u017b\u017c\u0005i\u0000\u0000\u017c\u017d\u0005"+ - 
"n\u0000\u0000\u017d\u017e\u0005e\u0000\u0000\u017e\u017f\u0005s\u0000"+ - "\u0000\u017f\u0180\u0005t\u0000\u0000\u0180\u0181\u0005a\u0000\u0000\u0181"+ - "\u0182\u0005t\u0000\u0000\u0182\u0183\u0005s\u0000\u0000\u0183\u0184\u0001"+ - "\u0000\u0000\u0000\u0184\u0185\u0006\u0007\u0000\u0000\u0185\u001a\u0001"+ - "\u0000\u0000\u0000\u0186\u0187\u0005k\u0000\u0000\u0187\u0188\u0005e\u0000"+ - "\u0000\u0188\u0189\u0005e\u0000\u0000\u0189\u018a\u0005p\u0000\u0000\u018a"+ - "\u018b\u0001\u0000\u0000\u0000\u018b\u018c\u0006\b\u0001\u0000\u018c\u001c"+ - "\u0001\u0000\u0000\u0000\u018d\u018e\u0005l\u0000\u0000\u018e\u018f\u0005"+ - "i\u0000\u0000\u018f\u0190\u0005m\u0000\u0000\u0190\u0191\u0005i\u0000"+ - "\u0000\u0191\u0192\u0005t\u0000\u0000\u0192\u0193\u0001\u0000\u0000\u0000"+ - "\u0193\u0194\u0006\t\u0000\u0000\u0194\u001e\u0001\u0000\u0000\u0000\u0195"+ - "\u0196\u0005m\u0000\u0000\u0196\u0197\u0005v\u0000\u0000\u0197\u0198\u0005"+ - "_\u0000\u0000\u0198\u0199\u0005e\u0000\u0000\u0199\u019a\u0005x\u0000"+ - "\u0000\u019a\u019b\u0005p\u0000\u0000\u019b\u019c\u0005a\u0000\u0000\u019c"+ - "\u019d\u0005n\u0000\u0000\u019d\u019e\u0005d\u0000\u0000\u019e\u019f\u0001"+ - "\u0000\u0000\u0000\u019f\u01a0\u0006\n\u0005\u0000\u01a0 \u0001\u0000"+ - "\u0000\u0000\u01a1\u01a2\u0005p\u0000\u0000\u01a2\u01a3\u0005r\u0000\u0000"+ - "\u01a3\u01a4\u0005o\u0000\u0000\u01a4\u01a5\u0005j\u0000\u0000\u01a5\u01a6"+ - "\u0005e\u0000\u0000\u01a6\u01a7\u0005c\u0000\u0000\u01a7\u01a8\u0005t"+ - "\u0000\u0000\u01a8\u01a9\u0001\u0000\u0000\u0000\u01a9\u01aa\u0006\u000b"+ - "\u0001\u0000\u01aa\"\u0001\u0000\u0000\u0000\u01ab\u01ac\u0005r\u0000"+ - "\u0000\u01ac\u01ad\u0005e\u0000\u0000\u01ad\u01ae\u0005n\u0000\u0000\u01ae"+ - "\u01af\u0005a\u0000\u0000\u01af\u01b0\u0005m\u0000\u0000\u01b0\u01b1\u0005"+ - "e\u0000\u0000\u01b1\u01b2\u0001\u0000\u0000\u0000\u01b2\u01b3\u0006\f"+ - "\u0006\u0000\u01b3$\u0001\u0000\u0000\u0000\u01b4\u01b5\u0005r\u0000\u0000"+ - "\u01b5\u01b6\u0005o\u0000\u0000\u01b6\u01b7\u0005w\u0000\u0000\u01b7\u01b8"+ - "\u0001\u0000\u0000\u0000\u01b8\u01b9\u0006\r\u0000\u0000\u01b9&\u0001"+ - "\u0000\u0000\u0000\u01ba\u01bb\u0005s\u0000\u0000\u01bb\u01bc\u0005h\u0000"+ - "\u0000\u01bc\u01bd\u0005o\u0000\u0000\u01bd\u01be\u0005w\u0000\u0000\u01be"+ - "\u01bf\u0001\u0000\u0000\u0000\u01bf\u01c0\u0006\u000e\u0007\u0000\u01c0"+ - "(\u0001\u0000\u0000\u0000\u01c1\u01c2\u0005s\u0000\u0000\u01c2\u01c3\u0005"+ - "o\u0000\u0000\u01c3\u01c4\u0005r\u0000\u0000\u01c4\u01c5\u0005t\u0000"+ - "\u0000\u01c5\u01c6\u0001\u0000\u0000\u0000\u01c6\u01c7\u0006\u000f\u0000"+ - "\u0000\u01c7*\u0001\u0000\u0000\u0000\u01c8\u01c9\u0005s\u0000\u0000\u01c9"+ - "\u01ca\u0005t\u0000\u0000\u01ca\u01cb\u0005a\u0000\u0000\u01cb\u01cc\u0005"+ - "t\u0000\u0000\u01cc\u01cd\u0005s\u0000\u0000\u01cd\u01ce\u0001\u0000\u0000"+ - "\u0000\u01ce\u01cf\u0006\u0010\u0000\u0000\u01cf,\u0001\u0000\u0000\u0000"+ - "\u01d0\u01d1\u0005w\u0000\u0000\u01d1\u01d2\u0005h\u0000\u0000\u01d2\u01d3"+ - "\u0005e\u0000\u0000\u01d3\u01d4\u0005r\u0000\u0000\u01d4\u01d5\u0005e"+ - "\u0000\u0000\u01d5\u01d6\u0001\u0000\u0000\u0000\u01d6\u01d7\u0006\u0011"+ - "\u0000\u0000\u01d7.\u0001\u0000\u0000\u0000\u01d8\u01da\b\u0000\u0000"+ - "\u0000\u01d9\u01d8\u0001\u0000\u0000\u0000\u01da\u01db\u0001\u0000\u0000"+ - "\u0000\u01db\u01d9\u0001\u0000\u0000\u0000\u01db\u01dc\u0001\u0000\u0000"+ - "\u0000\u01dc\u01dd\u0001\u0000\u0000\u0000\u01dd\u01de\u0006\u0012\u0000"+ - "\u0000\u01de0\u0001\u0000\u0000\u0000\u01df\u01e0\u0005/\u0000\u0000\u01e0"+ - 
"\u01e1\u0005/\u0000\u0000\u01e1\u01e5\u0001\u0000\u0000\u0000\u01e2\u01e4"+ - "\b\u0001\u0000\u0000\u01e3\u01e2\u0001\u0000\u0000\u0000\u01e4\u01e7\u0001"+ - "\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000\u0000\u0000\u01e5\u01e6\u0001"+ - "\u0000\u0000\u0000\u01e6\u01e9\u0001\u0000\u0000\u0000\u01e7\u01e5\u0001"+ - "\u0000\u0000\u0000\u01e8\u01ea\u0005\r\u0000\u0000\u01e9\u01e8\u0001\u0000"+ - "\u0000\u0000\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea\u01ec\u0001\u0000"+ - "\u0000\u0000\u01eb\u01ed\u0005\n\u0000\u0000\u01ec\u01eb\u0001\u0000\u0000"+ - "\u0000\u01ec\u01ed\u0001\u0000\u0000\u0000\u01ed\u01ee\u0001\u0000\u0000"+ - "\u0000\u01ee\u01ef\u0006\u0013\b\u0000\u01ef2\u0001\u0000\u0000\u0000"+ - "\u01f0\u01f1\u0005/\u0000\u0000\u01f1\u01f2\u0005*\u0000\u0000\u01f2\u01f7"+ - "\u0001\u0000\u0000\u0000\u01f3\u01f6\u00033\u0014\u0000\u01f4\u01f6\t"+ - "\u0000\u0000\u0000\u01f5\u01f3\u0001\u0000\u0000\u0000\u01f5\u01f4\u0001"+ - "\u0000\u0000\u0000\u01f6\u01f9\u0001\u0000\u0000\u0000\u01f7\u01f8\u0001"+ - "\u0000\u0000\u0000\u01f7\u01f5\u0001\u0000\u0000\u0000\u01f8\u01fa\u0001"+ - "\u0000\u0000\u0000\u01f9\u01f7\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005"+ - "*\u0000\u0000\u01fb\u01fc\u0005/\u0000\u0000\u01fc\u01fd\u0001\u0000\u0000"+ - "\u0000\u01fd\u01fe\u0006\u0014\b\u0000\u01fe4\u0001\u0000\u0000\u0000"+ - "\u01ff\u0201\u0007\u0002\u0000\u0000\u0200\u01ff\u0001\u0000\u0000\u0000"+ - "\u0201\u0202\u0001\u0000\u0000\u0000\u0202\u0200\u0001\u0000\u0000\u0000"+ - "\u0202\u0203\u0001\u0000\u0000\u0000\u0203\u0204\u0001\u0000\u0000\u0000"+ - "\u0204\u0205\u0006\u0015\b\u0000\u02056\u0001\u0000\u0000\u0000\u0206"+ - "\u0207\u0003\u00a1K\u0000\u0207\u0208\u0001\u0000\u0000\u0000\u0208\u0209"+ - "\u0006\u0016\t\u0000\u0209\u020a\u0006\u0016\n\u0000\u020a8\u0001\u0000"+ - "\u0000\u0000\u020b\u020c\u0003A\u001b\u0000\u020c\u020d\u0001\u0000\u0000"+ - "\u0000\u020d\u020e\u0006\u0017\u000b\u0000\u020e\u020f\u0006\u0017\f\u0000"+ - "\u020f:\u0001\u0000\u0000\u0000\u0210\u0211\u00035\u0015\u0000\u0211\u0212"+ - "\u0001\u0000\u0000\u0000\u0212\u0213\u0006\u0018\b\u0000\u0213<\u0001"+ - "\u0000\u0000\u0000\u0214\u0215\u00031\u0013\u0000\u0215\u0216\u0001\u0000"+ - "\u0000\u0000\u0216\u0217\u0006\u0019\b\u0000\u0217>\u0001\u0000\u0000"+ - "\u0000\u0218\u0219\u00033\u0014\u0000\u0219\u021a\u0001\u0000\u0000\u0000"+ - "\u021a\u021b\u0006\u001a\b\u0000\u021b@\u0001\u0000\u0000\u0000\u021c"+ - "\u021d\u0005|\u0000\u0000\u021d\u021e\u0001\u0000\u0000\u0000\u021e\u021f"+ - "\u0006\u001b\f\u0000\u021fB\u0001\u0000\u0000\u0000\u0220\u0221\u0007"+ - "\u0003\u0000\u0000\u0221D\u0001\u0000\u0000\u0000\u0222\u0223\u0007\u0004"+ - "\u0000\u0000\u0223F\u0001\u0000\u0000\u0000\u0224\u0225\u0005\\\u0000"+ - "\u0000\u0225\u0226\u0007\u0005\u0000\u0000\u0226H\u0001\u0000\u0000\u0000"+ - "\u0227\u0228\b\u0006\u0000\u0000\u0228J\u0001\u0000\u0000\u0000\u0229"+ - "\u022b\u0007\u0007\u0000\u0000\u022a\u022c\u0007\b\u0000\u0000\u022b\u022a"+ - "\u0001\u0000\u0000\u0000\u022b\u022c\u0001\u0000\u0000\u0000\u022c\u022e"+ - "\u0001\u0000\u0000\u0000\u022d\u022f\u0003C\u001c\u0000\u022e\u022d\u0001"+ - "\u0000\u0000\u0000\u022f\u0230\u0001\u0000\u0000\u0000\u0230\u022e\u0001"+ - "\u0000\u0000\u0000\u0230\u0231\u0001\u0000\u0000\u0000\u0231L\u0001\u0000"+ - "\u0000\u0000\u0232\u0233\u0005@\u0000\u0000\u0233N\u0001\u0000\u0000\u0000"+ - "\u0234\u0235\u0005`\u0000\u0000\u0235P\u0001\u0000\u0000\u0000\u0236\u023a"+ - "\b\t\u0000\u0000\u0237\u0238\u0005`\u0000\u0000\u0238\u023a\u0005`\u0000"+ - 
"\u0000\u0239\u0236\u0001\u0000\u0000\u0000\u0239\u0237\u0001\u0000\u0000"+ - "\u0000\u023aR\u0001\u0000\u0000\u0000\u023b\u023c\u0005_\u0000\u0000\u023c"+ - "T\u0001\u0000\u0000\u0000\u023d\u0241\u0003E\u001d\u0000\u023e\u0241\u0003"+ - "C\u001c\u0000\u023f\u0241\u0003S$\u0000\u0240\u023d\u0001\u0000\u0000"+ - "\u0000\u0240\u023e\u0001\u0000\u0000\u0000\u0240\u023f\u0001\u0000\u0000"+ - "\u0000\u0241V\u0001\u0000\u0000\u0000\u0242\u0247\u0005\"\u0000\u0000"+ - "\u0243\u0246\u0003G\u001e\u0000\u0244\u0246\u0003I\u001f\u0000\u0245\u0243"+ - "\u0001\u0000\u0000\u0000\u0245\u0244\u0001\u0000\u0000\u0000\u0246\u0249"+ - "\u0001\u0000\u0000\u0000\u0247\u0245\u0001\u0000\u0000\u0000\u0247\u0248"+ - "\u0001\u0000\u0000\u0000\u0248\u024a\u0001\u0000\u0000\u0000\u0249\u0247"+ - "\u0001\u0000\u0000\u0000\u024a\u0260\u0005\"\u0000\u0000\u024b\u024c\u0005"+ - "\"\u0000\u0000\u024c\u024d\u0005\"\u0000\u0000\u024d\u024e\u0005\"\u0000"+ - "\u0000\u024e\u0252\u0001\u0000\u0000\u0000\u024f\u0251\b\u0001\u0000\u0000"+ - "\u0250\u024f\u0001\u0000\u0000\u0000\u0251\u0254\u0001\u0000\u0000\u0000"+ - "\u0252\u0253\u0001\u0000\u0000\u0000\u0252\u0250\u0001\u0000\u0000\u0000"+ - "\u0253\u0255\u0001\u0000\u0000\u0000\u0254\u0252\u0001\u0000\u0000\u0000"+ - "\u0255\u0256\u0005\"\u0000\u0000\u0256\u0257\u0005\"\u0000\u0000\u0257"+ - "\u0258\u0005\"\u0000\u0000\u0258\u025a\u0001\u0000\u0000\u0000\u0259\u025b"+ - "\u0005\"\u0000\u0000\u025a\u0259\u0001\u0000\u0000\u0000\u025a\u025b\u0001"+ - "\u0000\u0000\u0000\u025b\u025d\u0001\u0000\u0000\u0000\u025c\u025e\u0005"+ - "\"\u0000\u0000\u025d\u025c\u0001\u0000\u0000\u0000\u025d\u025e\u0001\u0000"+ - "\u0000\u0000\u025e\u0260\u0001\u0000\u0000\u0000\u025f\u0242\u0001\u0000"+ - "\u0000\u0000\u025f\u024b\u0001\u0000\u0000\u0000\u0260X\u0001\u0000\u0000"+ - "\u0000\u0261\u0263\u0003C\u001c\u0000\u0262\u0261\u0001\u0000\u0000\u0000"+ - "\u0263\u0264\u0001\u0000\u0000\u0000\u0264\u0262\u0001\u0000\u0000\u0000"+ - "\u0264\u0265\u0001\u0000\u0000\u0000\u0265Z\u0001\u0000\u0000\u0000\u0266"+ - "\u0268\u0003C\u001c\u0000\u0267\u0266\u0001\u0000\u0000\u0000\u0268\u0269"+ - "\u0001\u0000\u0000\u0000\u0269\u0267\u0001\u0000\u0000\u0000\u0269\u026a"+ - "\u0001\u0000\u0000\u0000\u026a\u026b\u0001\u0000\u0000\u0000\u026b\u026f"+ - "\u0003i/\u0000\u026c\u026e\u0003C\u001c\u0000\u026d\u026c\u0001\u0000"+ - "\u0000\u0000\u026e\u0271\u0001\u0000\u0000\u0000\u026f\u026d\u0001\u0000"+ - "\u0000\u0000\u026f\u0270\u0001\u0000\u0000\u0000\u0270\u0291\u0001\u0000"+ - "\u0000\u0000\u0271\u026f\u0001\u0000\u0000\u0000\u0272\u0274\u0003i/\u0000"+ - "\u0273\u0275\u0003C\u001c\u0000\u0274\u0273\u0001\u0000\u0000\u0000\u0275"+ - "\u0276\u0001\u0000\u0000\u0000\u0276\u0274\u0001\u0000\u0000\u0000\u0276"+ - "\u0277\u0001\u0000\u0000\u0000\u0277\u0291\u0001\u0000\u0000\u0000\u0278"+ - "\u027a\u0003C\u001c\u0000\u0279\u0278\u0001\u0000\u0000\u0000\u027a\u027b"+ - "\u0001\u0000\u0000\u0000\u027b\u0279\u0001\u0000\u0000\u0000\u027b\u027c"+ - "\u0001\u0000\u0000\u0000\u027c\u0284\u0001\u0000\u0000\u0000\u027d\u0281"+ - "\u0003i/\u0000\u027e\u0280\u0003C\u001c\u0000\u027f\u027e\u0001\u0000"+ - "\u0000\u0000\u0280\u0283\u0001\u0000\u0000\u0000\u0281\u027f\u0001\u0000"+ - "\u0000\u0000\u0281\u0282\u0001\u0000\u0000\u0000\u0282\u0285\u0001\u0000"+ - "\u0000\u0000\u0283\u0281\u0001\u0000\u0000\u0000\u0284\u027d\u0001\u0000"+ - "\u0000\u0000\u0284\u0285\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000"+ - "\u0000\u0000\u0286\u0287\u0003K \u0000\u0287\u0291\u0001\u0000\u0000\u0000"+ - 
"\u0288\u028a\u0003i/\u0000\u0289\u028b\u0003C\u001c\u0000\u028a\u0289"+ - "\u0001\u0000\u0000\u0000\u028b\u028c\u0001\u0000\u0000\u0000\u028c\u028a"+ - "\u0001\u0000\u0000\u0000\u028c\u028d\u0001\u0000\u0000\u0000\u028d\u028e"+ - "\u0001\u0000\u0000\u0000\u028e\u028f\u0003K \u0000\u028f\u0291\u0001\u0000"+ - "\u0000\u0000\u0290\u0267\u0001\u0000\u0000\u0000\u0290\u0272\u0001\u0000"+ - "\u0000\u0000\u0290\u0279\u0001\u0000\u0000\u0000\u0290\u0288\u0001\u0000"+ - "\u0000\u0000\u0291\\\u0001\u0000\u0000\u0000\u0292\u0293\u0005b\u0000"+ - "\u0000\u0293\u0294\u0005y\u0000\u0000\u0294^\u0001\u0000\u0000\u0000\u0295"+ - "\u0296\u0005a\u0000\u0000\u0296\u0297\u0005n\u0000\u0000\u0297\u0298\u0005"+ - "d\u0000\u0000\u0298`\u0001\u0000\u0000\u0000\u0299\u029a\u0005a\u0000"+ - "\u0000\u029a\u029b\u0005s\u0000\u0000\u029b\u029c\u0005c\u0000\u0000\u029c"+ - "b\u0001\u0000\u0000\u0000\u029d\u029e\u0005=\u0000\u0000\u029ed\u0001"+ - "\u0000\u0000\u0000\u029f\u02a0\u0005,\u0000\u0000\u02a0f\u0001\u0000\u0000"+ - "\u0000\u02a1\u02a2\u0005d\u0000\u0000\u02a2\u02a3\u0005e\u0000\u0000\u02a3"+ - "\u02a4\u0005s\u0000\u0000\u02a4\u02a5\u0005c\u0000\u0000\u02a5h\u0001"+ - "\u0000\u0000\u0000\u02a6\u02a7\u0005.\u0000\u0000\u02a7j\u0001\u0000\u0000"+ - "\u0000\u02a8\u02a9\u0005f\u0000\u0000\u02a9\u02aa\u0005a\u0000\u0000\u02aa"+ - "\u02ab\u0005l\u0000\u0000\u02ab\u02ac\u0005s\u0000\u0000\u02ac\u02ad\u0005"+ - "e\u0000\u0000\u02adl\u0001\u0000\u0000\u0000\u02ae\u02af\u0005f\u0000"+ - "\u0000\u02af\u02b0\u0005i\u0000\u0000\u02b0\u02b1\u0005r\u0000\u0000\u02b1"+ - "\u02b2\u0005s\u0000\u0000\u02b2\u02b3\u0005t\u0000\u0000\u02b3n\u0001"+ - "\u0000\u0000\u0000\u02b4\u02b5\u0005l\u0000\u0000\u02b5\u02b6\u0005a\u0000"+ - "\u0000\u02b6\u02b7\u0005s\u0000\u0000\u02b7\u02b8\u0005t\u0000\u0000\u02b8"+ - "p\u0001\u0000\u0000\u0000\u02b9\u02ba\u0005(\u0000\u0000\u02bar\u0001"+ - "\u0000\u0000\u0000\u02bb\u02bc\u0005i\u0000\u0000\u02bc\u02bd\u0005n\u0000"+ - "\u0000\u02bdt\u0001\u0000\u0000\u0000\u02be\u02bf\u0005i\u0000\u0000\u02bf"+ - "\u02c0\u0005s\u0000\u0000\u02c0v\u0001\u0000\u0000\u0000\u02c1\u02c2\u0005"+ - "l\u0000\u0000\u02c2\u02c3\u0005i\u0000\u0000\u02c3\u02c4\u0005k\u0000"+ - "\u0000\u02c4\u02c5\u0005e\u0000\u0000\u02c5x\u0001\u0000\u0000\u0000\u02c6"+ - "\u02c7\u0005n\u0000\u0000\u02c7\u02c8\u0005o\u0000\u0000\u02c8\u02c9\u0005"+ - "t\u0000\u0000\u02c9z\u0001\u0000\u0000\u0000\u02ca\u02cb\u0005n\u0000"+ - "\u0000\u02cb\u02cc\u0005u\u0000\u0000\u02cc\u02cd\u0005l\u0000\u0000\u02cd"+ - "\u02ce\u0005l\u0000\u0000\u02ce|\u0001\u0000\u0000\u0000\u02cf\u02d0\u0005"+ - "n\u0000\u0000\u02d0\u02d1\u0005u\u0000\u0000\u02d1\u02d2\u0005l\u0000"+ - "\u0000\u02d2\u02d3\u0005l\u0000\u0000\u02d3\u02d4\u0005s\u0000\u0000\u02d4"+ - "~\u0001\u0000\u0000\u0000\u02d5\u02d6\u0005o\u0000\u0000\u02d6\u02d7\u0005"+ - "r\u0000\u0000\u02d7\u0080\u0001\u0000\u0000\u0000\u02d8\u02d9\u0005?\u0000"+ - "\u0000\u02d9\u0082\u0001\u0000\u0000\u0000\u02da\u02db\u0005r\u0000\u0000"+ - "\u02db\u02dc\u0005l\u0000\u0000\u02dc\u02dd\u0005i\u0000\u0000\u02dd\u02de"+ - "\u0005k\u0000\u0000\u02de\u02df\u0005e\u0000\u0000\u02df\u0084\u0001\u0000"+ - "\u0000\u0000\u02e0\u02e1\u0005)\u0000\u0000\u02e1\u0086\u0001\u0000\u0000"+ - "\u0000\u02e2\u02e3\u0005t\u0000\u0000\u02e3\u02e4\u0005r\u0000\u0000\u02e4"+ - "\u02e5\u0005u\u0000\u0000\u02e5\u02e6\u0005e\u0000\u0000\u02e6\u0088\u0001"+ - "\u0000\u0000\u0000\u02e7\u02e8\u0005=\u0000\u0000\u02e8\u02e9\u0005=\u0000"+ - "\u0000\u02e9\u008a\u0001\u0000\u0000\u0000\u02ea\u02eb\u0005=\u0000\u0000"+ - 
"\u02eb\u02ec\u0005~\u0000\u0000\u02ec\u008c\u0001\u0000\u0000\u0000\u02ed"+ - "\u02ee\u0005!\u0000\u0000\u02ee\u02ef\u0005=\u0000\u0000\u02ef\u008e\u0001"+ - "\u0000\u0000\u0000\u02f0\u02f1\u0005<\u0000\u0000\u02f1\u0090\u0001\u0000"+ - "\u0000\u0000\u02f2\u02f3\u0005<\u0000\u0000\u02f3\u02f4\u0005=\u0000\u0000"+ - "\u02f4\u0092\u0001\u0000\u0000\u0000\u02f5\u02f6\u0005>\u0000\u0000\u02f6"+ - "\u0094\u0001\u0000\u0000\u0000\u02f7\u02f8\u0005>\u0000\u0000\u02f8\u02f9"+ - "\u0005=\u0000\u0000\u02f9\u0096\u0001\u0000\u0000\u0000\u02fa\u02fb\u0005"+ - "+\u0000\u0000\u02fb\u0098\u0001\u0000\u0000\u0000\u02fc\u02fd\u0005-\u0000"+ - "\u0000\u02fd\u009a\u0001\u0000\u0000\u0000\u02fe\u02ff\u0005*\u0000\u0000"+ - "\u02ff\u009c\u0001\u0000\u0000\u0000\u0300\u0301\u0005/\u0000\u0000\u0301"+ - "\u009e\u0001\u0000\u0000\u0000\u0302\u0303\u0005%\u0000\u0000\u0303\u00a0"+ - "\u0001\u0000\u0000\u0000\u0304\u0305\u0005[\u0000\u0000\u0305\u0306\u0001"+ - "\u0000\u0000\u0000\u0306\u0307\u0006K\u0000\u0000\u0307\u0308\u0006K\u0000"+ - "\u0000\u0308\u00a2\u0001\u0000\u0000\u0000\u0309\u030a\u0005]\u0000\u0000"+ - "\u030a\u030b\u0001\u0000\u0000\u0000\u030b\u030c\u0006L\f\u0000\u030c"+ - "\u030d\u0006L\f\u0000\u030d\u00a4\u0001\u0000\u0000\u0000\u030e\u0312"+ - "\u0003E\u001d\u0000\u030f\u0311\u0003U%\u0000\u0310\u030f\u0001\u0000"+ - "\u0000\u0000\u0311\u0314\u0001\u0000\u0000\u0000\u0312\u0310\u0001\u0000"+ - "\u0000\u0000\u0312\u0313\u0001\u0000\u0000\u0000\u0313\u031f\u0001\u0000"+ - "\u0000\u0000\u0314\u0312\u0001\u0000\u0000\u0000\u0315\u0318\u0003S$\u0000"+ - "\u0316\u0318\u0003M!\u0000\u0317\u0315\u0001\u0000\u0000\u0000\u0317\u0316"+ - "\u0001\u0000\u0000\u0000\u0318\u031a\u0001\u0000\u0000\u0000\u0319\u031b"+ - "\u0003U%\u0000\u031a\u0319\u0001\u0000\u0000\u0000\u031b\u031c\u0001\u0000"+ - "\u0000\u0000\u031c\u031a\u0001\u0000\u0000\u0000\u031c\u031d\u0001\u0000"+ - "\u0000\u0000\u031d\u031f\u0001\u0000\u0000\u0000\u031e\u030e\u0001\u0000"+ - "\u0000\u0000\u031e\u0317\u0001\u0000\u0000\u0000\u031f\u00a6\u0001\u0000"+ - "\u0000\u0000\u0320\u0322\u0003O\"\u0000\u0321\u0323\u0003Q#\u0000\u0322"+ - "\u0321\u0001\u0000\u0000\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324"+ - "\u0322\u0001\u0000\u0000\u0000\u0324\u0325\u0001\u0000\u0000\u0000\u0325"+ - "\u0326\u0001\u0000\u0000\u0000\u0326\u0327\u0003O\"\u0000\u0327\u00a8"+ - "\u0001\u0000\u0000\u0000\u0328\u0329\u00031\u0013\u0000\u0329\u032a\u0001"+ - "\u0000\u0000\u0000\u032a\u032b\u0006O\b\u0000\u032b\u00aa\u0001\u0000"+ - "\u0000\u0000\u032c\u032d\u00033\u0014\u0000\u032d\u032e\u0001\u0000\u0000"+ - "\u0000\u032e\u032f\u0006P\b\u0000\u032f\u00ac\u0001\u0000\u0000\u0000"+ - "\u0330\u0331\u00035\u0015\u0000\u0331\u0332\u0001\u0000\u0000\u0000\u0332"+ - "\u0333\u0006Q\b\u0000\u0333\u00ae\u0001\u0000\u0000\u0000\u0334\u0335"+ - "\u0003A\u001b\u0000\u0335\u0336\u0001\u0000\u0000\u0000\u0336\u0337\u0006"+ - "R\u000b\u0000\u0337\u0338\u0006R\f\u0000\u0338\u00b0\u0001\u0000\u0000"+ - "\u0000\u0339\u033a\u0003\u00a1K\u0000\u033a\u033b\u0001\u0000\u0000\u0000"+ - "\u033b\u033c\u0006S\t\u0000\u033c\u00b2\u0001\u0000\u0000\u0000\u033d"+ - "\u033e\u0003\u00a3L\u0000\u033e\u033f\u0001\u0000\u0000\u0000\u033f\u0340"+ - "\u0006T\r\u0000\u0340\u00b4\u0001\u0000\u0000\u0000\u0341\u0342\u0003"+ - "e-\u0000\u0342\u0343\u0001\u0000\u0000\u0000\u0343\u0344\u0006U\u000e"+ - "\u0000\u0344\u00b6\u0001\u0000\u0000\u0000\u0345\u0346\u0003c,\u0000\u0346"+ - "\u0347\u0001\u0000\u0000\u0000\u0347\u0348\u0006V\u000f\u0000\u0348\u00b8"+ - 
"\u0001\u0000\u0000\u0000\u0349\u034a\u0005m\u0000\u0000\u034a\u034b\u0005"+ - "e\u0000\u0000\u034b\u034c\u0005t\u0000\u0000\u034c\u034d\u0005a\u0000"+ - "\u0000\u034d\u034e\u0005d\u0000\u0000\u034e\u034f\u0005a\u0000\u0000\u034f"+ - "\u0350\u0005t\u0000\u0000\u0350\u0351\u0005a\u0000\u0000\u0351\u00ba\u0001"+ - "\u0000\u0000\u0000\u0352\u0356\b\n\u0000\u0000\u0353\u0354\u0005/\u0000"+ - "\u0000\u0354\u0356\b\u000b\u0000\u0000\u0355\u0352\u0001\u0000\u0000\u0000"+ - "\u0355\u0353\u0001\u0000\u0000\u0000\u0356\u00bc\u0001\u0000\u0000\u0000"+ - "\u0357\u0359\u0003\u00bbX\u0000\u0358\u0357\u0001\u0000\u0000\u0000\u0359"+ - "\u035a\u0001\u0000\u0000\u0000\u035a\u0358\u0001\u0000\u0000\u0000\u035a"+ - "\u035b\u0001\u0000\u0000\u0000\u035b\u00be\u0001\u0000\u0000\u0000\u035c"+ - "\u035d\u0003\u00a7N\u0000\u035d\u035e\u0001\u0000\u0000\u0000\u035e\u035f"+ - "\u0006Z\u0010\u0000\u035f\u00c0\u0001\u0000\u0000\u0000\u0360\u0361\u0003"+ - "1\u0013\u0000\u0361\u0362\u0001\u0000\u0000\u0000\u0362\u0363\u0006[\b"+ - "\u0000\u0363\u00c2\u0001\u0000\u0000\u0000\u0364\u0365\u00033\u0014\u0000"+ - "\u0365\u0366\u0001\u0000\u0000\u0000\u0366\u0367\u0006\\\b\u0000\u0367"+ - "\u00c4\u0001\u0000\u0000\u0000\u0368\u0369\u00035\u0015\u0000\u0369\u036a"+ - "\u0001\u0000\u0000\u0000\u036a\u036b\u0006]\b\u0000\u036b\u00c6\u0001"+ - "\u0000\u0000\u0000\u036c\u036d\u0003A\u001b\u0000\u036d\u036e\u0001\u0000"+ - "\u0000\u0000\u036e\u036f\u0006^\u000b\u0000\u036f\u0370\u0006^\f\u0000"+ - "\u0370\u00c8\u0001\u0000\u0000\u0000\u0371\u0372\u0003i/\u0000\u0372\u0373"+ - "\u0001\u0000\u0000\u0000\u0373\u0374\u0006_\u0011\u0000\u0374\u00ca\u0001"+ - "\u0000\u0000\u0000\u0375\u0376\u0003e-\u0000\u0376\u0377\u0001\u0000\u0000"+ - "\u0000\u0377\u0378\u0006`\u000e\u0000\u0378\u00cc\u0001\u0000\u0000\u0000"+ - "\u0379\u037e\u0003E\u001d\u0000\u037a\u037e\u0003C\u001c\u0000\u037b\u037e"+ - "\u0003S$\u0000\u037c\u037e\u0003\u009bH\u0000\u037d\u0379\u0001\u0000"+ - "\u0000\u0000\u037d\u037a\u0001\u0000\u0000\u0000\u037d\u037b\u0001\u0000"+ - "\u0000\u0000\u037d\u037c\u0001\u0000\u0000\u0000\u037e\u00ce\u0001\u0000"+ - "\u0000\u0000\u037f\u0382\u0003E\u001d\u0000\u0380\u0382\u0003\u009bH\u0000"+ - "\u0381\u037f\u0001\u0000\u0000\u0000\u0381\u0380\u0001\u0000\u0000\u0000"+ - "\u0382\u0386\u0001\u0000\u0000\u0000\u0383\u0385\u0003\u00cda\u0000\u0384"+ - "\u0383\u0001\u0000\u0000\u0000\u0385\u0388\u0001\u0000\u0000\u0000\u0386"+ - "\u0384\u0001\u0000\u0000\u0000\u0386\u0387\u0001\u0000\u0000\u0000\u0387"+ - "\u0393\u0001\u0000\u0000\u0000\u0388\u0386\u0001\u0000\u0000\u0000\u0389"+ - "\u038c\u0003S$\u0000\u038a\u038c\u0003M!\u0000\u038b\u0389\u0001\u0000"+ - "\u0000\u0000\u038b\u038a\u0001\u0000\u0000\u0000\u038c\u038e\u0001\u0000"+ - "\u0000\u0000\u038d\u038f\u0003\u00cda\u0000\u038e\u038d\u0001\u0000\u0000"+ - "\u0000\u038f\u0390\u0001\u0000\u0000\u0000\u0390\u038e\u0001\u0000\u0000"+ - "\u0000\u0390\u0391\u0001\u0000\u0000\u0000\u0391\u0393\u0001\u0000\u0000"+ - "\u0000\u0392\u0381\u0001\u0000\u0000\u0000\u0392\u038b\u0001\u0000\u0000"+ - "\u0000\u0393\u00d0\u0001\u0000\u0000\u0000\u0394\u0395\u0003\u00cfb\u0000"+ - "\u0395\u0396\u0001\u0000\u0000\u0000\u0396\u0397\u0006c\u0012\u0000\u0397"+ - "\u00d2\u0001\u0000\u0000\u0000\u0398\u0399\u0003\u00a7N\u0000\u0399\u039a"+ - "\u0001\u0000\u0000\u0000\u039a\u039b\u0006d\u0010\u0000\u039b\u00d4\u0001"+ - "\u0000\u0000\u0000\u039c\u039d\u00031\u0013\u0000\u039d\u039e\u0001\u0000"+ - "\u0000\u0000\u039e\u039f\u0006e\b\u0000\u039f\u00d6\u0001\u0000\u0000"+ - 
"\u0000\u03a0\u03a1\u00033\u0014\u0000\u03a1\u03a2\u0001\u0000\u0000\u0000"+ - "\u03a2\u03a3\u0006f\b\u0000\u03a3\u00d8\u0001\u0000\u0000\u0000\u03a4"+ - "\u03a5\u00035\u0015\u0000\u03a5\u03a6\u0001\u0000\u0000\u0000\u03a6\u03a7"+ - "\u0006g\b\u0000\u03a7\u00da\u0001\u0000\u0000\u0000\u03a8\u03a9\u0003"+ - "A\u001b\u0000\u03a9\u03aa\u0001\u0000\u0000\u0000\u03aa\u03ab\u0006h\u000b"+ - "\u0000\u03ab\u03ac\u0006h\f\u0000\u03ac\u00dc\u0001\u0000\u0000\u0000"+ - "\u03ad\u03ae\u0003c,\u0000\u03ae\u03af\u0001\u0000\u0000\u0000\u03af\u03b0"+ - "\u0006i\u000f\u0000\u03b0\u00de\u0001\u0000\u0000\u0000\u03b1\u03b2\u0003"+ - "e-\u0000\u03b2\u03b3\u0001\u0000\u0000\u0000\u03b3\u03b4\u0006j\u000e"+ - "\u0000\u03b4\u00e0\u0001\u0000\u0000\u0000\u03b5\u03b6\u0003i/\u0000\u03b6"+ - "\u03b7\u0001\u0000\u0000\u0000\u03b7\u03b8\u0006k\u0011\u0000\u03b8\u00e2"+ - "\u0001\u0000\u0000\u0000\u03b9\u03ba\u0005a\u0000\u0000\u03ba\u03bb\u0005"+ - "s\u0000\u0000\u03bb\u00e4\u0001\u0000\u0000\u0000\u03bc\u03bd\u0003\u00a7"+ - "N\u0000\u03bd\u03be\u0001\u0000\u0000\u0000\u03be\u03bf\u0006m\u0010\u0000"+ - "\u03bf\u00e6\u0001\u0000\u0000\u0000\u03c0\u03c1\u0003\u00cfb\u0000\u03c1"+ - "\u03c2\u0001\u0000\u0000\u0000\u03c2\u03c3\u0006n\u0012\u0000\u03c3\u00e8"+ - "\u0001\u0000\u0000\u0000\u03c4\u03c5\u00031\u0013\u0000\u03c5\u03c6\u0001"+ - "\u0000\u0000\u0000\u03c6\u03c7\u0006o\b\u0000\u03c7\u00ea\u0001\u0000"+ - "\u0000\u0000\u03c8\u03c9\u00033\u0014\u0000\u03c9\u03ca\u0001\u0000\u0000"+ - "\u0000\u03ca\u03cb\u0006p\b\u0000\u03cb\u00ec\u0001\u0000\u0000\u0000"+ - "\u03cc\u03cd\u00035\u0015\u0000\u03cd\u03ce\u0001\u0000\u0000\u0000\u03ce"+ - "\u03cf\u0006q\b\u0000\u03cf\u00ee\u0001\u0000\u0000\u0000\u03d0\u03d1"+ - "\u0003A\u001b\u0000\u03d1\u03d2\u0001\u0000\u0000\u0000\u03d2\u03d3\u0006"+ - "r\u000b\u0000\u03d3\u03d4\u0006r\f\u0000\u03d4\u00f0\u0001\u0000\u0000"+ - "\u0000\u03d5\u03d6\u0003\u00a1K\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000"+ - "\u03d7\u03d8\u0006s\t\u0000\u03d8\u03d9\u0006s\u0013\u0000\u03d9\u00f2"+ - "\u0001\u0000\u0000\u0000\u03da\u03db\u0005o\u0000\u0000\u03db\u03dc\u0005"+ - "n\u0000\u0000\u03dc\u03dd\u0001\u0000\u0000\u0000\u03dd\u03de\u0006t\u0014"+ - "\u0000\u03de\u00f4\u0001\u0000\u0000\u0000\u03df\u03e0\u0005w\u0000\u0000"+ - "\u03e0\u03e1\u0005i\u0000\u0000\u03e1\u03e2\u0005t\u0000\u0000\u03e2\u03e3"+ - "\u0005h\u0000\u0000\u03e3\u03e4\u0001\u0000\u0000\u0000\u03e4\u03e5\u0006"+ - "u\u0014\u0000\u03e5\u00f6\u0001\u0000\u0000\u0000\u03e6\u03e7\b\f\u0000"+ - "\u0000\u03e7\u00f8\u0001\u0000\u0000\u0000\u03e8\u03eb\u0003E\u001d\u0000"+ - "\u03e9\u03eb\u0003C\u001c\u0000\u03ea\u03e8\u0001\u0000\u0000\u0000\u03ea"+ - "\u03e9\u0001\u0000\u0000\u0000\u03eb\u03ef\u0001\u0000\u0000\u0000\u03ec"+ - "\u03ee\u0003\u00f7v\u0000\u03ed\u03ec\u0001\u0000\u0000\u0000\u03ee\u03f1"+ - "\u0001\u0000\u0000\u0000\u03ef\u03ed\u0001\u0000\u0000\u0000\u03ef\u03f0"+ - "\u0001\u0000\u0000\u0000\u03f0\u00fa\u0001\u0000\u0000\u0000\u03f1\u03ef"+ - "\u0001\u0000\u0000\u0000\u03f2\u03f3\u0003\u00a7N\u0000\u03f3\u03f4\u0001"+ - "\u0000\u0000\u0000\u03f4\u03f5\u0006x\u0010\u0000\u03f5\u00fc\u0001\u0000"+ - "\u0000\u0000\u03f6\u03f7\u0003\u00f9w\u0000\u03f7\u03f8\u0001\u0000\u0000"+ - "\u0000\u03f8\u03f9\u0006y\u0015\u0000\u03f9\u00fe\u0001\u0000\u0000\u0000"+ - "\u03fa\u03fb\u00031\u0013\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc"+ - "\u03fd\u0006z\b\u0000\u03fd\u0100\u0001\u0000\u0000\u0000\u03fe\u03ff"+ - "\u00033\u0014\u0000\u03ff\u0400\u0001\u0000\u0000\u0000\u0400\u0401\u0006"+ - 
"{\b\u0000\u0401\u0102\u0001\u0000\u0000\u0000\u0402\u0403\u00035\u0015"+ - "\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u0405\u0006|\b\u0000"+ - "\u0405\u0104\u0001\u0000\u0000\u0000\u0406\u0407\u0003A\u001b\u0000\u0407"+ - "\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006}\u000b\u0000\u0409\u040a"+ - "\u0006}\f\u0000\u040a\u040b\u0006}\f\u0000\u040b\u0106\u0001\u0000\u0000"+ - "\u0000\u040c\u040d\u0003c,\u0000\u040d\u040e\u0001\u0000\u0000\u0000\u040e"+ - "\u040f\u0006~\u000f\u0000\u040f\u0108\u0001\u0000\u0000\u0000\u0410\u0411"+ - "\u0003e-\u0000\u0411\u0412\u0001\u0000\u0000\u0000\u0412\u0413\u0006\u007f"+ - "\u000e\u0000\u0413\u010a\u0001\u0000\u0000\u0000\u0414\u0415\u0003i/\u0000"+ - "\u0415\u0416\u0001\u0000\u0000\u0000\u0416\u0417\u0006\u0080\u0011\u0000"+ - "\u0417\u010c\u0001\u0000\u0000\u0000\u0418\u0419\u0003\u00f5u\u0000\u0419"+ - "\u041a\u0001\u0000\u0000\u0000\u041a\u041b\u0006\u0081\u0016\u0000\u041b"+ - "\u010e\u0001\u0000\u0000\u0000\u041c\u041d\u0003\u00cfb\u0000\u041d\u041e"+ - "\u0001\u0000\u0000\u0000\u041e\u041f\u0006\u0082\u0012\u0000\u041f\u0110"+ - "\u0001\u0000\u0000\u0000\u0420\u0421\u0003\u00a7N\u0000\u0421\u0422\u0001"+ - "\u0000\u0000\u0000\u0422\u0423\u0006\u0083\u0010\u0000\u0423\u0112\u0001"+ - "\u0000\u0000\u0000\u0424\u0425\u00031\u0013\u0000\u0425\u0426\u0001\u0000"+ - "\u0000\u0000\u0426\u0427\u0006\u0084\b\u0000\u0427\u0114\u0001\u0000\u0000"+ - "\u0000\u0428\u0429\u00033\u0014\u0000\u0429\u042a\u0001\u0000\u0000\u0000"+ - "\u042a\u042b\u0006\u0085\b\u0000\u042b\u0116\u0001\u0000\u0000\u0000\u042c"+ - "\u042d\u00035\u0015\u0000\u042d\u042e\u0001\u0000\u0000\u0000\u042e\u042f"+ - "\u0006\u0086\b\u0000\u042f\u0118\u0001\u0000\u0000\u0000\u0430\u0431\u0003"+ - "A\u001b\u0000\u0431\u0432\u0001\u0000\u0000\u0000\u0432\u0433\u0006\u0087"+ - "\u000b\u0000\u0433\u0434\u0006\u0087\f\u0000\u0434\u011a\u0001\u0000\u0000"+ - "\u0000\u0435\u0436\u0003i/\u0000\u0436\u0437\u0001\u0000\u0000\u0000\u0437"+ - "\u0438\u0006\u0088\u0011\u0000\u0438\u011c\u0001\u0000\u0000\u0000\u0439"+ - "\u043a\u0003\u00a7N\u0000\u043a\u043b\u0001\u0000\u0000\u0000\u043b\u043c"+ - "\u0006\u0089\u0010\u0000\u043c\u011e\u0001\u0000\u0000\u0000\u043d\u043e"+ - "\u0003\u00a5M\u0000\u043e\u043f\u0001\u0000\u0000\u0000\u043f\u0440\u0006"+ - "\u008a\u0017\u0000\u0440\u0120\u0001\u0000\u0000\u0000\u0441\u0442\u0003"+ - "1\u0013\u0000\u0442\u0443\u0001\u0000\u0000\u0000\u0443\u0444\u0006\u008b"+ - "\b\u0000\u0444\u0122\u0001\u0000\u0000\u0000\u0445\u0446\u00033\u0014"+ - "\u0000\u0446\u0447\u0001\u0000\u0000\u0000\u0447\u0448\u0006\u008c\b\u0000"+ - "\u0448\u0124\u0001\u0000\u0000\u0000\u0449\u044a\u00035\u0015\u0000\u044a"+ - "\u044b\u0001\u0000\u0000\u0000\u044b\u044c\u0006\u008d\b\u0000\u044c\u0126"+ - "\u0001\u0000\u0000\u0000\u044d\u044e\u0003A\u001b\u0000\u044e\u044f\u0001"+ - "\u0000\u0000\u0000\u044f\u0450\u0006\u008e\u000b\u0000\u0450\u0451\u0006"+ - "\u008e\f\u0000\u0451\u0128\u0001\u0000\u0000\u0000\u0452\u0453\u0005i"+ - "\u0000\u0000\u0453\u0454\u0005n\u0000\u0000\u0454\u0455\u0005f\u0000\u0000"+ - "\u0455\u0456\u0005o\u0000\u0000\u0456\u012a\u0001\u0000\u0000\u0000\u0457"+ - "\u0458\u0005f\u0000\u0000\u0458\u0459\u0005u\u0000\u0000\u0459\u045a\u0005"+ - "n\u0000\u0000\u045a\u045b\u0005c\u0000\u0000\u045b\u045c\u0005t\u0000"+ - "\u0000\u045c\u045d\u0005i\u0000\u0000\u045d\u045e\u0005o\u0000\u0000\u045e"+ - "\u045f\u0005n\u0000\u0000\u045f\u0460\u0005s\u0000\u0000\u0460\u012c\u0001"+ - "\u0000\u0000\u0000\u0461\u0462\u00031\u0013\u0000\u0462\u0463\u0001\u0000"+ - 
"\u0000\u0000\u0463\u0464\u0006\u0091\b\u0000\u0464\u012e\u0001\u0000\u0000"+ - "\u0000\u0465\u0466\u00033\u0014\u0000\u0466\u0467\u0001\u0000\u0000\u0000"+ - "\u0467\u0468\u0006\u0092\b\u0000\u0468\u0130\u0001\u0000\u0000\u0000\u0469"+ - "\u046a\u00035\u0015\u0000\u046a\u046b\u0001\u0000\u0000\u0000\u046b\u046c"+ - "\u0006\u0093\b\u0000\u046c\u0132\u0001\u0000\u0000\u0000\u046d\u046e\u0003"+ - "\u00a3L\u0000\u046e\u046f\u0001\u0000\u0000\u0000\u046f\u0470\u0006\u0094"+ - "\r\u0000\u0470\u0471\u0006\u0094\f\u0000\u0471\u0134\u0001\u0000\u0000"+ - "\u0000\u0472\u0473\u0005:\u0000\u0000\u0473\u0136\u0001\u0000\u0000\u0000"+ - "\u0474\u047a\u0003M!\u0000\u0475\u047a\u0003C\u001c\u0000\u0476\u047a"+ - "\u0003i/\u0000\u0477\u047a\u0003E\u001d\u0000\u0478\u047a\u0003S$\u0000"+ - "\u0479\u0474\u0001\u0000\u0000\u0000\u0479\u0475\u0001\u0000\u0000\u0000"+ - "\u0479\u0476\u0001\u0000\u0000\u0000\u0479\u0477\u0001\u0000\u0000\u0000"+ - "\u0479\u0478\u0001\u0000\u0000\u0000\u047a\u047b\u0001\u0000\u0000\u0000"+ - "\u047b\u0479\u0001\u0000\u0000\u0000\u047b\u047c\u0001\u0000\u0000\u0000"+ - "\u047c\u0138\u0001\u0000\u0000\u0000\u047d\u047e\u00031\u0013\u0000\u047e"+ - "\u047f\u0001\u0000\u0000\u0000\u047f\u0480\u0006\u0097\b\u0000\u0480\u013a"+ - "\u0001\u0000\u0000\u0000\u0481\u0482\u00033\u0014\u0000\u0482\u0483\u0001"+ - "\u0000\u0000\u0000\u0483\u0484\u0006\u0098\b\u0000\u0484\u013c\u0001\u0000"+ - "\u0000\u0000\u0485\u0486\u00035\u0015\u0000\u0486\u0487\u0001\u0000\u0000"+ - "\u0000\u0487\u0488\u0006\u0099\b\u0000\u0488\u013e\u0001\u0000\u0000\u0000"+ - "6\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u01db\u01e5\u01e9"+ - "\u01ec\u01f5\u01f7\u0202\u022b\u0230\u0239\u0240\u0245\u0247\u0252\u025a"+ - "\u025d\u025f\u0264\u0269\u026f\u0276\u027b\u0281\u0284\u028c\u0290\u0312"+ - "\u0317\u031c\u031e\u0324\u0355\u035a\u037d\u0381\u0386\u038b\u0390\u0392"+ - "\u03ea\u03ef\u0479\u047b\u0018\u0005\u0002\u0000\u0005\u0004\u0000\u0005"+ - "\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005"+ - "\u0000\u0005\t\u0000\u0000\u0001\u0000\u0007@\u0000\u0005\u0000\u0000"+ - "\u0007\u001a\u0000\u0004\u0000\u0000\u0007A\u0000\u0007\"\u0000\u0007"+ - "!\u0000\u0007C\u0000\u0007$\u0000\u0007L\u0000\u0005\n\u0000\u0005\u0007"+ - "\u0000\u0007V\u0000\u0007U\u0000\u0007B\u0000"; + "\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0003\u001f\u0220\b\u001f"+ + "\u0001\u001f\u0004\u001f\u0223\b\u001f\u000b\u001f\f\u001f\u0224\u0001"+ + " \u0001 \u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0003\"\u022e\b\"\u0001"+ + "#\u0001#\u0001$\u0001$\u0001$\u0003$\u0235\b$\u0001%\u0001%\u0001%\u0005"+ + "%\u023a\b%\n%\f%\u023d\t%\u0001%\u0001%\u0001%\u0001%\u0001%\u0001%\u0005"+ + "%\u0245\b%\n%\f%\u0248\t%\u0001%\u0001%\u0001%\u0001%\u0001%\u0003%\u024f"+ + "\b%\u0001%\u0003%\u0252\b%\u0003%\u0254\b%\u0001&\u0004&\u0257\b&\u000b"+ + "&\f&\u0258\u0001\'\u0004\'\u025c\b\'\u000b\'\f\'\u025d\u0001\'\u0001\'"+ + "\u0005\'\u0262\b\'\n\'\f\'\u0265\t\'\u0001\'\u0001\'\u0004\'\u0269\b\'"+ + "\u000b\'\f\'\u026a\u0001\'\u0004\'\u026e\b\'\u000b\'\f\'\u026f\u0001\'"+ + "\u0001\'\u0005\'\u0274\b\'\n\'\f\'\u0277\t\'\u0003\'\u0279\b\'\u0001\'"+ + "\u0001\'\u0001\'\u0001\'\u0004\'\u027f\b\'\u000b\'\f\'\u0280\u0001\'\u0001"+ + "\'\u0003\'\u0285\b\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001)"+ + "\u0001*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001,\u0001,\u0001-\u0001"+ + "-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u0001/\u0001/\u0001"+ 
+ "/\u0001/\u00010\u00010\u00010\u00010\u00010\u00010\u00011\u00011\u0001"+ + "1\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u00014\u00014\u0001"+ + "4\u00015\u00015\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u0001"+ + "7\u00017\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00018\u0001"+ + "8\u00019\u00019\u00019\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0001"+ + ";\u0001;\u0001<\u0001<\u0001=\u0001=\u0001=\u0001=\u0001=\u0001>\u0001"+ + ">\u0001>\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001"+ + "B\u0001B\u0001B\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001"+ + "F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001"+ + "J\u0001J\u0001J\u0001K\u0001K\u0001K\u0001K\u0001K\u0001L\u0001L\u0005"+ + "L\u0305\bL\nL\fL\u0308\tL\u0001L\u0001L\u0003L\u030c\bL\u0001L\u0004L"+ + "\u030f\bL\u000bL\fL\u0310\u0003L\u0313\bL\u0001M\u0001M\u0004M\u0317\b"+ + "M\u000bM\fM\u0318\u0001M\u0001M\u0001N\u0001N\u0001N\u0001N\u0001O\u0001"+ + "O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001"+ + "Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001"+ + "T\u0001T\u0001T\u0001T\u0001U\u0001U\u0001U\u0001U\u0001V\u0001V\u0001"+ + "V\u0001V\u0001V\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0003"+ + "W\u034a\bW\u0001X\u0004X\u034d\bX\u000bX\fX\u034e\u0001Y\u0001Y\u0001"+ + "Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001"+ + "\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001]\u0001^\u0001"+ + "^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001"+ + "`\u0003`\u0372\b`\u0001a\u0001a\u0003a\u0376\ba\u0001a\u0005a\u0379\b"+ + "a\na\fa\u037c\ta\u0001a\u0001a\u0003a\u0380\ba\u0001a\u0004a\u0383\ba"+ + "\u000ba\fa\u0384\u0003a\u0387\ba\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ + "c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001e\u0001"+ + "e\u0001f\u0001f\u0001f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001g\u0001"+ + "h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001"+ + "j\u0001j\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001"+ + "m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001"+ + "o\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001q\u0001"+ + "r\u0001r\u0001r\u0001r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001s\u0001"+ + "t\u0001t\u0001t\u0001t\u0001t\u0001t\u0001t\u0001u\u0001u\u0001v\u0001"+ + "v\u0003v\u03df\bv\u0001v\u0005v\u03e2\bv\nv\fv\u03e5\tv\u0001w\u0001w"+ + "\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001y\u0001"+ + "y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001|\u0001"+ + "|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001~\u0001"+ + "~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ + "\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001"+ + "\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001"+ + "\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001"+ + "\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001"+ + "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001"+ + "\u0087\u0001\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ + "\u0088\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001"+ + "\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ + "\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001"+ + 
"\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001"+ + "\u008e\u0001\u008e\u0001\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001"+ + "\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001"+ + "\u008f\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001"+ + "\u0091\u0001\u0091\u0001\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001"+ + "\u0092\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001"+ + "\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ + "\u0095\u0004\u0095\u046e\b\u0095\u000b\u0095\f\u0095\u046f\u0001\u0096"+ + "\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001\u0097"+ + "\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098\u0002\u01eb"+ + "\u0246\u0000\u0099\u000b\u0001\r\u0002\u000f\u0003\u0011\u0004\u0013\u0005"+ + "\u0015\u0006\u0017\u0007\u0019\b\u001b\t\u001d\n\u001f\u000b!\f#\r%\u000e"+ + "\'\u000f)\u0010+\u0011-\u0012/\u00131\u00143\u00155\u00007\u00009\u0016"+ + ";\u0017=\u0018?\u0019A\u0000C\u0000E\u0000G\u0000I\u0000K\u0000M\u0000"+ + "O\u0000Q\u0000S\u0000U\u001aW\u001bY\u001c[\u001d]\u001e_\u001fa c!e\""+ + "g#i$k%m&o\'q(s)u*w+y,{-}.\u007f/\u00810\u00831\u00852\u00873\u00894\u008b"+ + "5\u008d6\u008f7\u00918\u00939\u0095:\u0097;\u0099<\u009b=\u009d>\u009f"+ + "?\u00a1@\u00a3A\u00a5B\u00a7C\u00a9D\u00abE\u00ad\u0000\u00af\u0000\u00b1"+ + "\u0000\u00b3\u0000\u00b5\u0000\u00b7F\u00b9\u0000\u00bbG\u00bd\u0000\u00bf"+ + "H\u00c1I\u00c3J\u00c5\u0000\u00c7\u0000\u00c9\u0000\u00cb\u0000\u00cd"+ + "K\u00cf\u0000\u00d1\u0000\u00d3L\u00d5M\u00d7N\u00d9\u0000\u00db\u0000"+ + "\u00dd\u0000\u00df\u0000\u00e1O\u00e3\u0000\u00e5\u0000\u00e7P\u00e9Q"+ + "\u00ebR\u00ed\u0000\u00ef\u0000\u00f1S\u00f3T\u00f5\u0000\u00f7U\u00f9"+ + "\u0000\u00fb\u0000\u00fdV\u00ffW\u0101X\u0103\u0000\u0105\u0000\u0107"+ + "\u0000\u0109\u0000\u010b\u0000\u010d\u0000\u010f\u0000\u0111Y\u0113Z\u0115"+ + "[\u0117\u0000\u0119\u0000\u011b\u0000\u011d\u0000\u011f\\\u0121]\u0123"+ + "^\u0125\u0000\u0127_\u0129`\u012ba\u012db\u012fc\u0131\u0000\u0133d\u0135"+ + "e\u0137f\u0139g\u013bh\u000b\u0000\u0001\u0002\u0003\u0004\u0005\u0006"+ + "\u0007\b\t\n\r\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000"+ + "\t\n\r\r \u0001\u000009\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004"+ + "\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000++--\u0001\u0000``\n"+ + "\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u000b\u0000\t\n\r\r \""+ + "#,,//::<<>?\\\\||\u0498\u0000\u000b\u0001\u0000\u0000\u0000\u0000\r\u0001"+ + "\u0000\u0000\u0000\u0000\u000f\u0001\u0000\u0000\u0000\u0000\u0011\u0001"+ + "\u0000\u0000\u0000\u0000\u0013\u0001\u0000\u0000\u0000\u0000\u0015\u0001"+ + "\u0000\u0000\u0000\u0000\u0017\u0001\u0000\u0000\u0000\u0000\u0019\u0001"+ + "\u0000\u0000\u0000\u0000\u001b\u0001\u0000\u0000\u0000\u0000\u001d\u0001"+ + "\u0000\u0000\u0000\u0000\u001f\u0001\u0000\u0000\u0000\u0000!\u0001\u0000"+ + "\u0000\u0000\u0000#\u0001\u0000\u0000\u0000\u0000%\u0001\u0000\u0000\u0000"+ + "\u0000\'\u0001\u0000\u0000\u0000\u0000)\u0001\u0000\u0000\u0000\u0000"+ + "+\u0001\u0000\u0000\u0000\u0000-\u0001\u0000\u0000\u0000\u0000/\u0001"+ + "\u0000\u0000\u0000\u00001\u0001\u0000\u0000\u0000\u00003\u0001\u0000\u0000"+ + "\u0000\u00015\u0001\u0000\u0000\u0000\u00017\u0001\u0000\u0000\u0000\u0001"+ + "9\u0001\u0000\u0000\u0000\u0001;\u0001\u0000\u0000\u0000\u0001=\u0001"+ + "\u0000\u0000\u0000\u0002?\u0001\u0000\u0000\u0000\u0002U\u0001\u0000\u0000"+ + 
"\u0000\u0002W\u0001\u0000\u0000\u0000\u0002Y\u0001\u0000\u0000\u0000\u0002"+ + "[\u0001\u0000\u0000\u0000\u0002]\u0001\u0000\u0000\u0000\u0002_\u0001"+ + "\u0000\u0000\u0000\u0002a\u0001\u0000\u0000\u0000\u0002c\u0001\u0000\u0000"+ + "\u0000\u0002e\u0001\u0000\u0000\u0000\u0002g\u0001\u0000\u0000\u0000\u0002"+ + "i\u0001\u0000\u0000\u0000\u0002k\u0001\u0000\u0000\u0000\u0002m\u0001"+ + "\u0000\u0000\u0000\u0002o\u0001\u0000\u0000\u0000\u0002q\u0001\u0000\u0000"+ + "\u0000\u0002s\u0001\u0000\u0000\u0000\u0002u\u0001\u0000\u0000\u0000\u0002"+ + "w\u0001\u0000\u0000\u0000\u0002y\u0001\u0000\u0000\u0000\u0002{\u0001"+ + "\u0000\u0000\u0000\u0002}\u0001\u0000\u0000\u0000\u0002\u007f\u0001\u0000"+ + "\u0000\u0000\u0002\u0081\u0001\u0000\u0000\u0000\u0002\u0083\u0001\u0000"+ + "\u0000\u0000\u0002\u0085\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000"+ + "\u0000\u0000\u0002\u0089\u0001\u0000\u0000\u0000\u0002\u008b\u0001\u0000"+ + "\u0000\u0000\u0002\u008d\u0001\u0000\u0000\u0000\u0002\u008f\u0001\u0000"+ + "\u0000\u0000\u0002\u0091\u0001\u0000\u0000\u0000\u0002\u0093\u0001\u0000"+ + "\u0000\u0000\u0002\u0095\u0001\u0000\u0000\u0000\u0002\u0097\u0001\u0000"+ + "\u0000\u0000\u0002\u0099\u0001\u0000\u0000\u0000\u0002\u009b\u0001\u0000"+ + "\u0000\u0000\u0002\u009d\u0001\u0000\u0000\u0000\u0002\u009f\u0001\u0000"+ + "\u0000\u0000\u0002\u00a1\u0001\u0000\u0000\u0000\u0002\u00a3\u0001\u0000"+ + "\u0000\u0000\u0002\u00a5\u0001\u0000\u0000\u0000\u0002\u00a7\u0001\u0000"+ + "\u0000\u0000\u0002\u00a9\u0001\u0000\u0000\u0000\u0002\u00ab\u0001\u0000"+ + "\u0000\u0000\u0003\u00ad\u0001\u0000\u0000\u0000\u0003\u00af\u0001\u0000"+ + "\u0000\u0000\u0003\u00b1\u0001\u0000\u0000\u0000\u0003\u00b3\u0001\u0000"+ + "\u0000\u0000\u0003\u00b5\u0001\u0000\u0000\u0000\u0003\u00b7\u0001\u0000"+ + "\u0000\u0000\u0003\u00bb\u0001\u0000\u0000\u0000\u0003\u00bd\u0001\u0000"+ + "\u0000\u0000\u0003\u00bf\u0001\u0000\u0000\u0000\u0003\u00c1\u0001\u0000"+ + "\u0000\u0000\u0003\u00c3\u0001\u0000\u0000\u0000\u0004\u00c5\u0001\u0000"+ + "\u0000\u0000\u0004\u00c7\u0001\u0000\u0000\u0000\u0004\u00c9\u0001\u0000"+ + "\u0000\u0000\u0004\u00cd\u0001\u0000\u0000\u0000\u0004\u00cf\u0001\u0000"+ + "\u0000\u0000\u0004\u00d1\u0001\u0000\u0000\u0000\u0004\u00d3\u0001\u0000"+ + "\u0000\u0000\u0004\u00d5\u0001\u0000\u0000\u0000\u0004\u00d7\u0001\u0000"+ + "\u0000\u0000\u0005\u00d9\u0001\u0000\u0000\u0000\u0005\u00db\u0001\u0000"+ + "\u0000\u0000\u0005\u00dd\u0001\u0000\u0000\u0000\u0005\u00df\u0001\u0000"+ + "\u0000\u0000\u0005\u00e1\u0001\u0000\u0000\u0000\u0005\u00e3\u0001\u0000"+ + "\u0000\u0000\u0005\u00e5\u0001\u0000\u0000\u0000\u0005\u00e7\u0001\u0000"+ + "\u0000\u0000\u0005\u00e9\u0001\u0000\u0000\u0000\u0005\u00eb\u0001\u0000"+ + "\u0000\u0000\u0006\u00ed\u0001\u0000\u0000\u0000\u0006\u00ef\u0001\u0000"+ + "\u0000\u0000\u0006\u00f1\u0001\u0000\u0000\u0000\u0006\u00f3\u0001\u0000"+ + "\u0000\u0000\u0006\u00f7\u0001\u0000\u0000\u0000\u0006\u00f9\u0001\u0000"+ + "\u0000\u0000\u0006\u00fb\u0001\u0000\u0000\u0000\u0006\u00fd\u0001\u0000"+ + "\u0000\u0000\u0006\u00ff\u0001\u0000\u0000\u0000\u0006\u0101\u0001\u0000"+ + "\u0000\u0000\u0007\u0103\u0001\u0000\u0000\u0000\u0007\u0105\u0001\u0000"+ + "\u0000\u0000\u0007\u0107\u0001\u0000\u0000\u0000\u0007\u0109\u0001\u0000"+ + "\u0000\u0000\u0007\u010b\u0001\u0000\u0000\u0000\u0007\u010d\u0001\u0000"+ + "\u0000\u0000\u0007\u010f\u0001\u0000\u0000\u0000\u0007\u0111\u0001\u0000"+ + "\u0000\u0000\u0007\u0113\u0001\u0000\u0000\u0000\u0007\u0115\u0001\u0000"+ + 
"\u0000\u0000\b\u0117\u0001\u0000\u0000\u0000\b\u0119\u0001\u0000\u0000"+ + "\u0000\b\u011b\u0001\u0000\u0000\u0000\b\u011d\u0001\u0000\u0000\u0000"+ + "\b\u011f\u0001\u0000\u0000\u0000\b\u0121\u0001\u0000\u0000\u0000\b\u0123"+ + "\u0001\u0000\u0000\u0000\t\u0125\u0001\u0000\u0000\u0000\t\u0127\u0001"+ + "\u0000\u0000\u0000\t\u0129\u0001\u0000\u0000\u0000\t\u012b\u0001\u0000"+ + "\u0000\u0000\t\u012d\u0001\u0000\u0000\u0000\t\u012f\u0001\u0000\u0000"+ + "\u0000\n\u0131\u0001\u0000\u0000\u0000\n\u0133\u0001\u0000\u0000\u0000"+ + "\n\u0135\u0001\u0000\u0000\u0000\n\u0137\u0001\u0000\u0000\u0000\n\u0139"+ + "\u0001\u0000\u0000\u0000\n\u013b\u0001\u0000\u0000\u0000\u000b\u013d\u0001"+ + "\u0000\u0000\u0000\r\u0147\u0001\u0000\u0000\u0000\u000f\u014e\u0001\u0000"+ + "\u0000\u0000\u0011\u0157\u0001\u0000\u0000\u0000\u0013\u015e\u0001\u0000"+ + "\u0000\u0000\u0015\u0168\u0001\u0000\u0000\u0000\u0017\u016f\u0001\u0000"+ + "\u0000\u0000\u0019\u0176\u0001\u0000\u0000\u0000\u001b\u0184\u0001\u0000"+ + "\u0000\u0000\u001d\u018b\u0001\u0000\u0000\u0000\u001f\u0193\u0001\u0000"+ + "\u0000\u0000!\u019f\u0001\u0000\u0000\u0000#\u01a8\u0001\u0000\u0000\u0000"+ + "%\u01ae\u0001\u0000\u0000\u0000\'\u01b5\u0001\u0000\u0000\u0000)\u01bc"+ + "\u0001\u0000\u0000\u0000+\u01c4\u0001\u0000\u0000\u0000-\u01cd\u0001\u0000"+ + "\u0000\u0000/\u01d3\u0001\u0000\u0000\u00001\u01e4\u0001\u0000\u0000\u0000"+ + "3\u01f4\u0001\u0000\u0000\u00005\u01fa\u0001\u0000\u0000\u00007\u01ff"+ + "\u0001\u0000\u0000\u00009\u0204\u0001\u0000\u0000\u0000;\u0208\u0001\u0000"+ + "\u0000\u0000=\u020c\u0001\u0000\u0000\u0000?\u0210\u0001\u0000\u0000\u0000"+ + "A\u0214\u0001\u0000\u0000\u0000C\u0216\u0001\u0000\u0000\u0000E\u0218"+ + "\u0001\u0000\u0000\u0000G\u021b\u0001\u0000\u0000\u0000I\u021d\u0001\u0000"+ + "\u0000\u0000K\u0226\u0001\u0000\u0000\u0000M\u0228\u0001\u0000\u0000\u0000"+ + "O\u022d\u0001\u0000\u0000\u0000Q\u022f\u0001\u0000\u0000\u0000S\u0234"+ + "\u0001\u0000\u0000\u0000U\u0253\u0001\u0000\u0000\u0000W\u0256\u0001\u0000"+ + "\u0000\u0000Y\u0284\u0001\u0000\u0000\u0000[\u0286\u0001\u0000\u0000\u0000"+ + "]\u0289\u0001\u0000\u0000\u0000_\u028d\u0001\u0000\u0000\u0000a\u0291"+ + "\u0001\u0000\u0000\u0000c\u0293\u0001\u0000\u0000\u0000e\u0295\u0001\u0000"+ + "\u0000\u0000g\u029a\u0001\u0000\u0000\u0000i\u029c\u0001\u0000\u0000\u0000"+ + "k\u02a2\u0001\u0000\u0000\u0000m\u02a8\u0001\u0000\u0000\u0000o\u02ad"+ + "\u0001\u0000\u0000\u0000q\u02af\u0001\u0000\u0000\u0000s\u02b2\u0001\u0000"+ + "\u0000\u0000u\u02b5\u0001\u0000\u0000\u0000w\u02ba\u0001\u0000\u0000\u0000"+ + "y\u02be\u0001\u0000\u0000\u0000{\u02c3\u0001\u0000\u0000\u0000}\u02c9"+ + "\u0001\u0000\u0000\u0000\u007f\u02cc\u0001\u0000\u0000\u0000\u0081\u02ce"+ + "\u0001\u0000\u0000\u0000\u0083\u02d4\u0001\u0000\u0000\u0000\u0085\u02d6"+ + "\u0001\u0000\u0000\u0000\u0087\u02db\u0001\u0000\u0000\u0000\u0089\u02de"+ + "\u0001\u0000\u0000\u0000\u008b\u02e1\u0001\u0000\u0000\u0000\u008d\u02e4"+ + "\u0001\u0000\u0000\u0000\u008f\u02e6\u0001\u0000\u0000\u0000\u0091\u02e9"+ + "\u0001\u0000\u0000\u0000\u0093\u02eb\u0001\u0000\u0000\u0000\u0095\u02ee"+ + "\u0001\u0000\u0000\u0000\u0097\u02f0\u0001\u0000\u0000\u0000\u0099\u02f2"+ + "\u0001\u0000\u0000\u0000\u009b\u02f4\u0001\u0000\u0000\u0000\u009d\u02f6"+ + "\u0001\u0000\u0000\u0000\u009f\u02f8\u0001\u0000\u0000\u0000\u00a1\u02fd"+ + "\u0001\u0000\u0000\u0000\u00a3\u0312\u0001\u0000\u0000\u0000\u00a5\u0314"+ + "\u0001\u0000\u0000\u0000\u00a7\u031c\u0001\u0000\u0000\u0000\u00a9\u0320"+ + 
"\u0001\u0000\u0000\u0000\u00ab\u0324\u0001\u0000\u0000\u0000\u00ad\u0328"+ + "\u0001\u0000\u0000\u0000\u00af\u032d\u0001\u0000\u0000\u0000\u00b1\u0331"+ + "\u0001\u0000\u0000\u0000\u00b3\u0335\u0001\u0000\u0000\u0000\u00b5\u0339"+ + "\u0001\u0000\u0000\u0000\u00b7\u033d\u0001\u0000\u0000\u0000\u00b9\u0349"+ + "\u0001\u0000\u0000\u0000\u00bb\u034c\u0001\u0000\u0000\u0000\u00bd\u0350"+ + "\u0001\u0000\u0000\u0000\u00bf\u0354\u0001\u0000\u0000\u0000\u00c1\u0358"+ + "\u0001\u0000\u0000\u0000\u00c3\u035c\u0001\u0000\u0000\u0000\u00c5\u0360"+ + "\u0001\u0000\u0000\u0000\u00c7\u0365\u0001\u0000\u0000\u0000\u00c9\u0369"+ + "\u0001\u0000\u0000\u0000\u00cb\u0371\u0001\u0000\u0000\u0000\u00cd\u0386"+ + "\u0001\u0000\u0000\u0000\u00cf\u0388\u0001\u0000\u0000\u0000\u00d1\u038c"+ + "\u0001\u0000\u0000\u0000\u00d3\u0390\u0001\u0000\u0000\u0000\u00d5\u0394"+ + "\u0001\u0000\u0000\u0000\u00d7\u0398\u0001\u0000\u0000\u0000\u00d9\u039c"+ + "\u0001\u0000\u0000\u0000\u00db\u03a1\u0001\u0000\u0000\u0000\u00dd\u03a5"+ + "\u0001\u0000\u0000\u0000\u00df\u03a9\u0001\u0000\u0000\u0000\u00e1\u03ad"+ + "\u0001\u0000\u0000\u0000\u00e3\u03b0\u0001\u0000\u0000\u0000\u00e5\u03b4"+ + "\u0001\u0000\u0000\u0000\u00e7\u03b8\u0001\u0000\u0000\u0000\u00e9\u03bc"+ + "\u0001\u0000\u0000\u0000\u00eb\u03c0\u0001\u0000\u0000\u0000\u00ed\u03c4"+ + "\u0001\u0000\u0000\u0000\u00ef\u03c9\u0001\u0000\u0000\u0000\u00f1\u03ce"+ + "\u0001\u0000\u0000\u0000\u00f3\u03d3\u0001\u0000\u0000\u0000\u00f5\u03da"+ + "\u0001\u0000\u0000\u0000\u00f7\u03de\u0001\u0000\u0000\u0000\u00f9\u03e6"+ + "\u0001\u0000\u0000\u0000\u00fb\u03ea\u0001\u0000\u0000\u0000\u00fd\u03ee"+ + "\u0001\u0000\u0000\u0000\u00ff\u03f2\u0001\u0000\u0000\u0000\u0101\u03f6"+ + "\u0001\u0000\u0000\u0000\u0103\u03fa\u0001\u0000\u0000\u0000\u0105\u0400"+ + "\u0001\u0000\u0000\u0000\u0107\u0404\u0001\u0000\u0000\u0000\u0109\u0408"+ + "\u0001\u0000\u0000\u0000\u010b\u040c\u0001\u0000\u0000\u0000\u010d\u0410"+ + "\u0001\u0000\u0000\u0000\u010f\u0414\u0001\u0000\u0000\u0000\u0111\u0418"+ + "\u0001\u0000\u0000\u0000\u0113\u041c\u0001\u0000\u0000\u0000\u0115\u0420"+ + "\u0001\u0000\u0000\u0000\u0117\u0424\u0001\u0000\u0000\u0000\u0119\u0429"+ + "\u0001\u0000\u0000\u0000\u011b\u042d\u0001\u0000\u0000\u0000\u011d\u0431"+ + "\u0001\u0000\u0000\u0000\u011f\u0435\u0001\u0000\u0000\u0000\u0121\u0439"+ + "\u0001\u0000\u0000\u0000\u0123\u043d\u0001\u0000\u0000\u0000\u0125\u0441"+ + "\u0001\u0000\u0000\u0000\u0127\u0446\u0001\u0000\u0000\u0000\u0129\u044b"+ + "\u0001\u0000\u0000\u0000\u012b\u0455\u0001\u0000\u0000\u0000\u012d\u0459"+ + "\u0001\u0000\u0000\u0000\u012f\u045d\u0001\u0000\u0000\u0000\u0131\u0461"+ + "\u0001\u0000\u0000\u0000\u0133\u0466\u0001\u0000\u0000\u0000\u0135\u046d"+ + "\u0001\u0000\u0000\u0000\u0137\u0471\u0001\u0000\u0000\u0000\u0139\u0475"+ + "\u0001\u0000\u0000\u0000\u013b\u0479\u0001\u0000\u0000\u0000\u013d\u013e"+ + "\u0005d\u0000\u0000\u013e\u013f\u0005i\u0000\u0000\u013f\u0140\u0005s"+ + "\u0000\u0000\u0140\u0141\u0005s\u0000\u0000\u0141\u0142\u0005e\u0000\u0000"+ + "\u0142\u0143\u0005c\u0000\u0000\u0143\u0144\u0005t\u0000\u0000\u0144\u0145"+ + "\u0001\u0000\u0000\u0000\u0145\u0146\u0006\u0000\u0000\u0000\u0146\f\u0001"+ + "\u0000\u0000\u0000\u0147\u0148\u0005d\u0000\u0000\u0148\u0149\u0005r\u0000"+ + "\u0000\u0149\u014a\u0005o\u0000\u0000\u014a\u014b\u0005p\u0000\u0000\u014b"+ + "\u014c\u0001\u0000\u0000\u0000\u014c\u014d\u0006\u0001\u0001\u0000\u014d"+ + "\u000e\u0001\u0000\u0000\u0000\u014e\u014f\u0005e\u0000\u0000\u014f\u0150"+ + 
"\u0005n\u0000\u0000\u0150\u0151\u0005r\u0000\u0000\u0151\u0152\u0005i"+ + "\u0000\u0000\u0152\u0153\u0005c\u0000\u0000\u0153\u0154\u0005h\u0000\u0000"+ + "\u0154\u0155\u0001\u0000\u0000\u0000\u0155\u0156\u0006\u0002\u0002\u0000"+ + "\u0156\u0010\u0001\u0000\u0000\u0000\u0157\u0158\u0005e\u0000\u0000\u0158"+ + "\u0159\u0005v\u0000\u0000\u0159\u015a\u0005a\u0000\u0000\u015a\u015b\u0005"+ + "l\u0000\u0000\u015b\u015c\u0001\u0000\u0000\u0000\u015c\u015d\u0006\u0003"+ + "\u0000\u0000\u015d\u0012\u0001\u0000\u0000\u0000\u015e\u015f\u0005e\u0000"+ + "\u0000\u015f\u0160\u0005x\u0000\u0000\u0160\u0161\u0005p\u0000\u0000\u0161"+ + "\u0162\u0005l\u0000\u0000\u0162\u0163\u0005a\u0000\u0000\u0163\u0164\u0005"+ + "i\u0000\u0000\u0164\u0165\u0005n\u0000\u0000\u0165\u0166\u0001\u0000\u0000"+ + "\u0000\u0166\u0167\u0006\u0004\u0003\u0000\u0167\u0014\u0001\u0000\u0000"+ + "\u0000\u0168\u0169\u0005f\u0000\u0000\u0169\u016a\u0005r\u0000\u0000\u016a"+ + "\u016b\u0005o\u0000\u0000\u016b\u016c\u0005m\u0000\u0000\u016c\u016d\u0001"+ + "\u0000\u0000\u0000\u016d\u016e\u0006\u0005\u0004\u0000\u016e\u0016\u0001"+ + "\u0000\u0000\u0000\u016f\u0170\u0005g\u0000\u0000\u0170\u0171\u0005r\u0000"+ + "\u0000\u0171\u0172\u0005o\u0000\u0000\u0172\u0173\u0005k\u0000\u0000\u0173"+ + "\u0174\u0001\u0000\u0000\u0000\u0174\u0175\u0006\u0006\u0000\u0000\u0175"+ + "\u0018\u0001\u0000\u0000\u0000\u0176\u0177\u0005i\u0000\u0000\u0177\u0178"+ + "\u0005n\u0000\u0000\u0178\u0179\u0005l\u0000\u0000\u0179\u017a\u0005i"+ + "\u0000\u0000\u017a\u017b\u0005n\u0000\u0000\u017b\u017c\u0005e\u0000\u0000"+ + "\u017c\u017d\u0005s\u0000\u0000\u017d\u017e\u0005t\u0000\u0000\u017e\u017f"+ + "\u0005a\u0000\u0000\u017f\u0180\u0005t\u0000\u0000\u0180\u0181\u0005s"+ + "\u0000\u0000\u0181\u0182\u0001\u0000\u0000\u0000\u0182\u0183\u0006\u0007"+ + "\u0000\u0000\u0183\u001a\u0001\u0000\u0000\u0000\u0184\u0185\u0005k\u0000"+ + "\u0000\u0185\u0186\u0005e\u0000\u0000\u0186\u0187\u0005e\u0000\u0000\u0187"+ + "\u0188\u0005p\u0000\u0000\u0188\u0189\u0001\u0000\u0000\u0000\u0189\u018a"+ + "\u0006\b\u0001\u0000\u018a\u001c\u0001\u0000\u0000\u0000\u018b\u018c\u0005"+ + "l\u0000\u0000\u018c\u018d\u0005i\u0000\u0000\u018d\u018e\u0005m\u0000"+ + "\u0000\u018e\u018f\u0005i\u0000\u0000\u018f\u0190\u0005t\u0000\u0000\u0190"+ + "\u0191\u0001\u0000\u0000\u0000\u0191\u0192\u0006\t\u0000\u0000\u0192\u001e"+ + "\u0001\u0000\u0000\u0000\u0193\u0194\u0005m\u0000\u0000\u0194\u0195\u0005"+ + "v\u0000\u0000\u0195\u0196\u0005_\u0000\u0000\u0196\u0197\u0005e\u0000"+ + "\u0000\u0197\u0198\u0005x\u0000\u0000\u0198\u0199\u0005p\u0000\u0000\u0199"+ + "\u019a\u0005a\u0000\u0000\u019a\u019b\u0005n\u0000\u0000\u019b\u019c\u0005"+ + "d\u0000\u0000\u019c\u019d\u0001\u0000\u0000\u0000\u019d\u019e\u0006\n"+ + "\u0005\u0000\u019e \u0001\u0000\u0000\u0000\u019f\u01a0\u0005r\u0000\u0000"+ + "\u01a0\u01a1\u0005e\u0000\u0000\u01a1\u01a2\u0005n\u0000\u0000\u01a2\u01a3"+ + "\u0005a\u0000\u0000\u01a3\u01a4\u0005m\u0000\u0000\u01a4\u01a5\u0005e"+ + "\u0000\u0000\u01a5\u01a6\u0001\u0000\u0000\u0000\u01a6\u01a7\u0006\u000b"+ + "\u0006\u0000\u01a7\"\u0001\u0000\u0000\u0000\u01a8\u01a9\u0005r\u0000"+ + "\u0000\u01a9\u01aa\u0005o\u0000\u0000\u01aa\u01ab\u0005w\u0000\u0000\u01ab"+ + "\u01ac\u0001\u0000\u0000\u0000\u01ac\u01ad\u0006\f\u0000\u0000\u01ad$"+ + "\u0001\u0000\u0000\u0000\u01ae\u01af\u0005s\u0000\u0000\u01af\u01b0\u0005"+ + "h\u0000\u0000\u01b0\u01b1\u0005o\u0000\u0000\u01b1\u01b2\u0005w\u0000"+ + "\u0000\u01b2\u01b3\u0001\u0000\u0000\u0000\u01b3\u01b4\u0006\r\u0007\u0000"+ + 
"\u01b4&\u0001\u0000\u0000\u0000\u01b5\u01b6\u0005s\u0000\u0000\u01b6\u01b7"+ + "\u0005o\u0000\u0000\u01b7\u01b8\u0005r\u0000\u0000\u01b8\u01b9\u0005t"+ + "\u0000\u0000\u01b9\u01ba\u0001\u0000\u0000\u0000\u01ba\u01bb\u0006\u000e"+ + "\u0000\u0000\u01bb(\u0001\u0000\u0000\u0000\u01bc\u01bd\u0005s\u0000\u0000"+ + "\u01bd\u01be\u0005t\u0000\u0000\u01be\u01bf\u0005a\u0000\u0000\u01bf\u01c0"+ + "\u0005t\u0000\u0000\u01c0\u01c1\u0005s\u0000\u0000\u01c1\u01c2\u0001\u0000"+ + "\u0000\u0000\u01c2\u01c3\u0006\u000f\u0000\u0000\u01c3*\u0001\u0000\u0000"+ + "\u0000\u01c4\u01c5\u0005w\u0000\u0000\u01c5\u01c6\u0005h\u0000\u0000\u01c6"+ + "\u01c7\u0005e\u0000\u0000\u01c7\u01c8\u0005r\u0000\u0000\u01c8\u01c9\u0005"+ + "e\u0000\u0000\u01c9\u01ca\u0001\u0000\u0000\u0000\u01ca\u01cb\u0006\u0010"+ + "\u0000\u0000\u01cb,\u0001\u0000\u0000\u0000\u01cc\u01ce\b\u0000\u0000"+ + "\u0000\u01cd\u01cc\u0001\u0000\u0000\u0000\u01ce\u01cf\u0001\u0000\u0000"+ + "\u0000\u01cf\u01cd\u0001\u0000\u0000\u0000\u01cf\u01d0\u0001\u0000\u0000"+ + "\u0000\u01d0\u01d1\u0001\u0000\u0000\u0000\u01d1\u01d2\u0006\u0011\u0000"+ + "\u0000\u01d2.\u0001\u0000\u0000\u0000\u01d3\u01d4\u0005/\u0000\u0000\u01d4"+ + "\u01d5\u0005/\u0000\u0000\u01d5\u01d9\u0001\u0000\u0000\u0000\u01d6\u01d8"+ + "\b\u0001\u0000\u0000\u01d7\u01d6\u0001\u0000\u0000\u0000\u01d8\u01db\u0001"+ + "\u0000\u0000\u0000\u01d9\u01d7\u0001\u0000\u0000\u0000\u01d9\u01da\u0001"+ + "\u0000\u0000\u0000\u01da\u01dd\u0001\u0000\u0000\u0000\u01db\u01d9\u0001"+ + "\u0000\u0000\u0000\u01dc\u01de\u0005\r\u0000\u0000\u01dd\u01dc\u0001\u0000"+ + "\u0000\u0000\u01dd\u01de\u0001\u0000\u0000\u0000\u01de\u01e0\u0001\u0000"+ + "\u0000\u0000\u01df\u01e1\u0005\n\u0000\u0000\u01e0\u01df\u0001\u0000\u0000"+ + "\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000\u01e1\u01e2\u0001\u0000\u0000"+ + "\u0000\u01e2\u01e3\u0006\u0012\b\u0000\u01e30\u0001\u0000\u0000\u0000"+ + "\u01e4\u01e5\u0005/\u0000\u0000\u01e5\u01e6\u0005*\u0000\u0000\u01e6\u01eb"+ + "\u0001\u0000\u0000\u0000\u01e7\u01ea\u00031\u0013\u0000\u01e8\u01ea\t"+ + "\u0000\u0000\u0000\u01e9\u01e7\u0001\u0000\u0000\u0000\u01e9\u01e8\u0001"+ + "\u0000\u0000\u0000\u01ea\u01ed\u0001\u0000\u0000\u0000\u01eb\u01ec\u0001"+ + "\u0000\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000\u01ec\u01ee\u0001"+ + "\u0000\u0000\u0000\u01ed\u01eb\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005"+ + "*\u0000\u0000\u01ef\u01f0\u0005/\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000"+ + "\u0000\u01f1\u01f2\u0006\u0013\b\u0000\u01f22\u0001\u0000\u0000\u0000"+ + "\u01f3\u01f5\u0007\u0002\u0000\u0000\u01f4\u01f3\u0001\u0000\u0000\u0000"+ + "\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f4\u0001\u0000\u0000\u0000"+ + "\u01f6\u01f7\u0001\u0000\u0000\u0000\u01f7\u01f8\u0001\u0000\u0000\u0000"+ + "\u01f8\u01f9\u0006\u0014\b\u0000\u01f94\u0001\u0000\u0000\u0000\u01fa"+ + "\u01fb\u0003\u009fJ\u0000\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fc\u01fd"+ + "\u0006\u0015\t\u0000\u01fd\u01fe\u0006\u0015\n\u0000\u01fe6\u0001\u0000"+ + "\u0000\u0000\u01ff\u0200\u0003?\u001a\u0000\u0200\u0201\u0001\u0000\u0000"+ + "\u0000\u0201\u0202\u0006\u0016\u000b\u0000\u0202\u0203\u0006\u0016\f\u0000"+ + "\u02038\u0001\u0000\u0000\u0000\u0204\u0205\u00033\u0014\u0000\u0205\u0206"+ + "\u0001\u0000\u0000\u0000\u0206\u0207\u0006\u0017\b\u0000\u0207:\u0001"+ + "\u0000\u0000\u0000\u0208\u0209\u0003/\u0012\u0000\u0209\u020a\u0001\u0000"+ + "\u0000\u0000\u020a\u020b\u0006\u0018\b\u0000\u020b<\u0001\u0000\u0000"+ + "\u0000\u020c\u020d\u00031\u0013\u0000\u020d\u020e\u0001\u0000\u0000\u0000"+ + 
"\u020e\u020f\u0006\u0019\b\u0000\u020f>\u0001\u0000\u0000\u0000\u0210"+ + "\u0211\u0005|\u0000\u0000\u0211\u0212\u0001\u0000\u0000\u0000\u0212\u0213"+ + "\u0006\u001a\f\u0000\u0213@\u0001\u0000\u0000\u0000\u0214\u0215\u0007"+ + "\u0003\u0000\u0000\u0215B\u0001\u0000\u0000\u0000\u0216\u0217\u0007\u0004"+ + "\u0000\u0000\u0217D\u0001\u0000\u0000\u0000\u0218\u0219\u0005\\\u0000"+ + "\u0000\u0219\u021a\u0007\u0005\u0000\u0000\u021aF\u0001\u0000\u0000\u0000"+ + "\u021b\u021c\b\u0006\u0000\u0000\u021cH\u0001\u0000\u0000\u0000\u021d"+ + "\u021f\u0007\u0007\u0000\u0000\u021e\u0220\u0007\b\u0000\u0000\u021f\u021e"+ + "\u0001\u0000\u0000\u0000\u021f\u0220\u0001\u0000\u0000\u0000\u0220\u0222"+ + "\u0001\u0000\u0000\u0000\u0221\u0223\u0003A\u001b\u0000\u0222\u0221\u0001"+ + "\u0000\u0000\u0000\u0223\u0224\u0001\u0000\u0000\u0000\u0224\u0222\u0001"+ + "\u0000\u0000\u0000\u0224\u0225\u0001\u0000\u0000\u0000\u0225J\u0001\u0000"+ + "\u0000\u0000\u0226\u0227\u0005@\u0000\u0000\u0227L\u0001\u0000\u0000\u0000"+ + "\u0228\u0229\u0005`\u0000\u0000\u0229N\u0001\u0000\u0000\u0000\u022a\u022e"+ + "\b\t\u0000\u0000\u022b\u022c\u0005`\u0000\u0000\u022c\u022e\u0005`\u0000"+ + "\u0000\u022d\u022a\u0001\u0000\u0000\u0000\u022d\u022b\u0001\u0000\u0000"+ + "\u0000\u022eP\u0001\u0000\u0000\u0000\u022f\u0230\u0005_\u0000\u0000\u0230"+ + "R\u0001\u0000\u0000\u0000\u0231\u0235\u0003C\u001c\u0000\u0232\u0235\u0003"+ + "A\u001b\u0000\u0233\u0235\u0003Q#\u0000\u0234\u0231\u0001\u0000\u0000"+ + "\u0000\u0234\u0232\u0001\u0000\u0000\u0000\u0234\u0233\u0001\u0000\u0000"+ + "\u0000\u0235T\u0001\u0000\u0000\u0000\u0236\u023b\u0005\"\u0000\u0000"+ + "\u0237\u023a\u0003E\u001d\u0000\u0238\u023a\u0003G\u001e\u0000\u0239\u0237"+ + "\u0001\u0000\u0000\u0000\u0239\u0238\u0001\u0000\u0000\u0000\u023a\u023d"+ + "\u0001\u0000\u0000\u0000\u023b\u0239\u0001\u0000\u0000\u0000\u023b\u023c"+ + "\u0001\u0000\u0000\u0000\u023c\u023e\u0001\u0000\u0000\u0000\u023d\u023b"+ + "\u0001\u0000\u0000\u0000\u023e\u0254\u0005\"\u0000\u0000\u023f\u0240\u0005"+ + "\"\u0000\u0000\u0240\u0241\u0005\"\u0000\u0000\u0241\u0242\u0005\"\u0000"+ + "\u0000\u0242\u0246\u0001\u0000\u0000\u0000\u0243\u0245\b\u0001\u0000\u0000"+ + "\u0244\u0243\u0001\u0000\u0000\u0000\u0245\u0248\u0001\u0000\u0000\u0000"+ + "\u0246\u0247\u0001\u0000\u0000\u0000\u0246\u0244\u0001\u0000\u0000\u0000"+ + "\u0247\u0249\u0001\u0000\u0000\u0000\u0248\u0246\u0001\u0000\u0000\u0000"+ + "\u0249\u024a\u0005\"\u0000\u0000\u024a\u024b\u0005\"\u0000\u0000\u024b"+ + "\u024c\u0005\"\u0000\u0000\u024c\u024e\u0001\u0000\u0000\u0000\u024d\u024f"+ + "\u0005\"\u0000\u0000\u024e\u024d\u0001\u0000\u0000\u0000\u024e\u024f\u0001"+ + "\u0000\u0000\u0000\u024f\u0251\u0001\u0000\u0000\u0000\u0250\u0252\u0005"+ + "\"\u0000\u0000\u0251\u0250\u0001\u0000\u0000\u0000\u0251\u0252\u0001\u0000"+ + "\u0000\u0000\u0252\u0254\u0001\u0000\u0000\u0000\u0253\u0236\u0001\u0000"+ + "\u0000\u0000\u0253\u023f\u0001\u0000\u0000\u0000\u0254V\u0001\u0000\u0000"+ + "\u0000\u0255\u0257\u0003A\u001b\u0000\u0256\u0255\u0001\u0000\u0000\u0000"+ + "\u0257\u0258\u0001\u0000\u0000\u0000\u0258\u0256\u0001\u0000\u0000\u0000"+ + "\u0258\u0259\u0001\u0000\u0000\u0000\u0259X\u0001\u0000\u0000\u0000\u025a"+ + "\u025c\u0003A\u001b\u0000\u025b\u025a\u0001\u0000\u0000\u0000\u025c\u025d"+ + "\u0001\u0000\u0000\u0000\u025d\u025b\u0001\u0000\u0000\u0000\u025d\u025e"+ + "\u0001\u0000\u0000\u0000\u025e\u025f\u0001\u0000\u0000\u0000\u025f\u0263"+ + "\u0003g.\u0000\u0260\u0262\u0003A\u001b\u0000\u0261\u0260\u0001\u0000"+ + 
"\u0000\u0000\u0262\u0265\u0001\u0000\u0000\u0000\u0263\u0261\u0001\u0000"+ + "\u0000\u0000\u0263\u0264\u0001\u0000\u0000\u0000\u0264\u0285\u0001\u0000"+ + "\u0000\u0000\u0265\u0263\u0001\u0000\u0000\u0000\u0266\u0268\u0003g.\u0000"+ + "\u0267\u0269\u0003A\u001b\u0000\u0268\u0267\u0001\u0000\u0000\u0000\u0269"+ + "\u026a\u0001\u0000\u0000\u0000\u026a\u0268\u0001\u0000\u0000\u0000\u026a"+ + "\u026b\u0001\u0000\u0000\u0000\u026b\u0285\u0001\u0000\u0000\u0000\u026c"+ + "\u026e\u0003A\u001b\u0000\u026d\u026c\u0001\u0000\u0000\u0000\u026e\u026f"+ + "\u0001\u0000\u0000\u0000\u026f\u026d\u0001\u0000\u0000\u0000\u026f\u0270"+ + "\u0001\u0000\u0000\u0000\u0270\u0278\u0001\u0000\u0000\u0000\u0271\u0275"+ + "\u0003g.\u0000\u0272\u0274\u0003A\u001b\u0000\u0273\u0272\u0001\u0000"+ + "\u0000\u0000\u0274\u0277\u0001\u0000\u0000\u0000\u0275\u0273\u0001\u0000"+ + "\u0000\u0000\u0275\u0276\u0001\u0000\u0000\u0000\u0276\u0279\u0001\u0000"+ + "\u0000\u0000\u0277\u0275\u0001\u0000\u0000\u0000\u0278\u0271\u0001\u0000"+ + "\u0000\u0000\u0278\u0279\u0001\u0000\u0000\u0000\u0279\u027a\u0001\u0000"+ + "\u0000\u0000\u027a\u027b\u0003I\u001f\u0000\u027b\u0285\u0001\u0000\u0000"+ + "\u0000\u027c\u027e\u0003g.\u0000\u027d\u027f\u0003A\u001b\u0000\u027e"+ + "\u027d\u0001\u0000\u0000\u0000\u027f\u0280\u0001\u0000\u0000\u0000\u0280"+ + "\u027e\u0001\u0000\u0000\u0000\u0280\u0281\u0001\u0000\u0000\u0000\u0281"+ + "\u0282\u0001\u0000\u0000\u0000\u0282\u0283\u0003I\u001f\u0000\u0283\u0285"+ + "\u0001\u0000\u0000\u0000\u0284\u025b\u0001\u0000\u0000\u0000\u0284\u0266"+ + "\u0001\u0000\u0000\u0000\u0284\u026d\u0001\u0000\u0000\u0000\u0284\u027c"+ + "\u0001\u0000\u0000\u0000\u0285Z\u0001\u0000\u0000\u0000\u0286\u0287\u0005"+ + "b\u0000\u0000\u0287\u0288\u0005y\u0000\u0000\u0288\\\u0001\u0000\u0000"+ + "\u0000\u0289\u028a\u0005a\u0000\u0000\u028a\u028b\u0005n\u0000\u0000\u028b"+ + "\u028c\u0005d\u0000\u0000\u028c^\u0001\u0000\u0000\u0000\u028d\u028e\u0005"+ + "a\u0000\u0000\u028e\u028f\u0005s\u0000\u0000\u028f\u0290\u0005c\u0000"+ + "\u0000\u0290`\u0001\u0000\u0000\u0000\u0291\u0292\u0005=\u0000\u0000\u0292"+ + "b\u0001\u0000\u0000\u0000\u0293\u0294\u0005,\u0000\u0000\u0294d\u0001"+ + "\u0000\u0000\u0000\u0295\u0296\u0005d\u0000\u0000\u0296\u0297\u0005e\u0000"+ + "\u0000\u0297\u0298\u0005s\u0000\u0000\u0298\u0299\u0005c\u0000\u0000\u0299"+ + "f\u0001\u0000\u0000\u0000\u029a\u029b\u0005.\u0000\u0000\u029bh\u0001"+ + "\u0000\u0000\u0000\u029c\u029d\u0005f\u0000\u0000\u029d\u029e\u0005a\u0000"+ + "\u0000\u029e\u029f\u0005l\u0000\u0000\u029f\u02a0\u0005s\u0000\u0000\u02a0"+ + "\u02a1\u0005e\u0000\u0000\u02a1j\u0001\u0000\u0000\u0000\u02a2\u02a3\u0005"+ + "f\u0000\u0000\u02a3\u02a4\u0005i\u0000\u0000\u02a4\u02a5\u0005r\u0000"+ + "\u0000\u02a5\u02a6\u0005s\u0000\u0000\u02a6\u02a7\u0005t\u0000\u0000\u02a7"+ + "l\u0001\u0000\u0000\u0000\u02a8\u02a9\u0005l\u0000\u0000\u02a9\u02aa\u0005"+ + "a\u0000\u0000\u02aa\u02ab\u0005s\u0000\u0000\u02ab\u02ac\u0005t\u0000"+ + "\u0000\u02acn\u0001\u0000\u0000\u0000\u02ad\u02ae\u0005(\u0000\u0000\u02ae"+ + "p\u0001\u0000\u0000\u0000\u02af\u02b0\u0005i\u0000\u0000\u02b0\u02b1\u0005"+ + "n\u0000\u0000\u02b1r\u0001\u0000\u0000\u0000\u02b2\u02b3\u0005i\u0000"+ + "\u0000\u02b3\u02b4\u0005s\u0000\u0000\u02b4t\u0001\u0000\u0000\u0000\u02b5"+ + "\u02b6\u0005l\u0000\u0000\u02b6\u02b7\u0005i\u0000\u0000\u02b7\u02b8\u0005"+ + "k\u0000\u0000\u02b8\u02b9\u0005e\u0000\u0000\u02b9v\u0001\u0000\u0000"+ + "\u0000\u02ba\u02bb\u0005n\u0000\u0000\u02bb\u02bc\u0005o\u0000\u0000\u02bc"+ + 
"\u02bd\u0005t\u0000\u0000\u02bdx\u0001\u0000\u0000\u0000\u02be\u02bf\u0005"+ + "n\u0000\u0000\u02bf\u02c0\u0005u\u0000\u0000\u02c0\u02c1\u0005l\u0000"+ + "\u0000\u02c1\u02c2\u0005l\u0000\u0000\u02c2z\u0001\u0000\u0000\u0000\u02c3"+ + "\u02c4\u0005n\u0000\u0000\u02c4\u02c5\u0005u\u0000\u0000\u02c5\u02c6\u0005"+ + "l\u0000\u0000\u02c6\u02c7\u0005l\u0000\u0000\u02c7\u02c8\u0005s\u0000"+ + "\u0000\u02c8|\u0001\u0000\u0000\u0000\u02c9\u02ca\u0005o\u0000\u0000\u02ca"+ + "\u02cb\u0005r\u0000\u0000\u02cb~\u0001\u0000\u0000\u0000\u02cc\u02cd\u0005"+ + "?\u0000\u0000\u02cd\u0080\u0001\u0000\u0000\u0000\u02ce\u02cf\u0005r\u0000"+ + "\u0000\u02cf\u02d0\u0005l\u0000\u0000\u02d0\u02d1\u0005i\u0000\u0000\u02d1"+ + "\u02d2\u0005k\u0000\u0000\u02d2\u02d3\u0005e\u0000\u0000\u02d3\u0082\u0001"+ + "\u0000\u0000\u0000\u02d4\u02d5\u0005)\u0000\u0000\u02d5\u0084\u0001\u0000"+ + "\u0000\u0000\u02d6\u02d7\u0005t\u0000\u0000\u02d7\u02d8\u0005r\u0000\u0000"+ + "\u02d8\u02d9\u0005u\u0000\u0000\u02d9\u02da\u0005e\u0000\u0000\u02da\u0086"+ + "\u0001\u0000\u0000\u0000\u02db\u02dc\u0005=\u0000\u0000\u02dc\u02dd\u0005"+ + "=\u0000\u0000\u02dd\u0088\u0001\u0000\u0000\u0000\u02de\u02df\u0005=\u0000"+ + "\u0000\u02df\u02e0\u0005~\u0000\u0000\u02e0\u008a\u0001\u0000\u0000\u0000"+ + "\u02e1\u02e2\u0005!\u0000\u0000\u02e2\u02e3\u0005=\u0000\u0000\u02e3\u008c"+ + "\u0001\u0000\u0000\u0000\u02e4\u02e5\u0005<\u0000\u0000\u02e5\u008e\u0001"+ + "\u0000\u0000\u0000\u02e6\u02e7\u0005<\u0000\u0000\u02e7\u02e8\u0005=\u0000"+ + "\u0000\u02e8\u0090\u0001\u0000\u0000\u0000\u02e9\u02ea\u0005>\u0000\u0000"+ + "\u02ea\u0092\u0001\u0000\u0000\u0000\u02eb\u02ec\u0005>\u0000\u0000\u02ec"+ + "\u02ed\u0005=\u0000\u0000\u02ed\u0094\u0001\u0000\u0000\u0000\u02ee\u02ef"+ + "\u0005+\u0000\u0000\u02ef\u0096\u0001\u0000\u0000\u0000\u02f0\u02f1\u0005"+ + "-\u0000\u0000\u02f1\u0098\u0001\u0000\u0000\u0000\u02f2\u02f3\u0005*\u0000"+ + "\u0000\u02f3\u009a\u0001\u0000\u0000\u0000\u02f4\u02f5\u0005/\u0000\u0000"+ + "\u02f5\u009c\u0001\u0000\u0000\u0000\u02f6\u02f7\u0005%\u0000\u0000\u02f7"+ + "\u009e\u0001\u0000\u0000\u0000\u02f8\u02f9\u0005[\u0000\u0000\u02f9\u02fa"+ + "\u0001\u0000\u0000\u0000\u02fa\u02fb\u0006J\u0000\u0000\u02fb\u02fc\u0006"+ + "J\u0000\u0000\u02fc\u00a0\u0001\u0000\u0000\u0000\u02fd\u02fe\u0005]\u0000"+ + "\u0000\u02fe\u02ff\u0001\u0000\u0000\u0000\u02ff\u0300\u0006K\f\u0000"+ + "\u0300\u0301\u0006K\f\u0000\u0301\u00a2\u0001\u0000\u0000\u0000\u0302"+ + "\u0306\u0003C\u001c\u0000\u0303\u0305\u0003S$\u0000\u0304\u0303\u0001"+ + "\u0000\u0000\u0000\u0305\u0308\u0001\u0000\u0000\u0000\u0306\u0304\u0001"+ + "\u0000\u0000\u0000\u0306\u0307\u0001\u0000\u0000\u0000\u0307\u0313\u0001"+ + "\u0000\u0000\u0000\u0308\u0306\u0001\u0000\u0000\u0000\u0309\u030c\u0003"+ + "Q#\u0000\u030a\u030c\u0003K \u0000\u030b\u0309\u0001\u0000\u0000\u0000"+ + "\u030b\u030a\u0001\u0000\u0000\u0000\u030c\u030e\u0001\u0000\u0000\u0000"+ + "\u030d\u030f\u0003S$\u0000\u030e\u030d\u0001\u0000\u0000\u0000\u030f\u0310"+ + "\u0001\u0000\u0000\u0000\u0310\u030e\u0001\u0000\u0000\u0000\u0310\u0311"+ + "\u0001\u0000\u0000\u0000\u0311\u0313\u0001\u0000\u0000\u0000\u0312\u0302"+ + "\u0001\u0000\u0000\u0000\u0312\u030b\u0001\u0000\u0000\u0000\u0313\u00a4"+ + "\u0001\u0000\u0000\u0000\u0314\u0316\u0003M!\u0000\u0315\u0317\u0003O"+ + "\"\u0000\u0316\u0315\u0001\u0000\u0000\u0000\u0317\u0318\u0001\u0000\u0000"+ + "\u0000\u0318\u0316\u0001\u0000\u0000\u0000\u0318\u0319\u0001\u0000\u0000"+ + "\u0000\u0319\u031a\u0001\u0000\u0000\u0000\u031a\u031b\u0003M!\u0000\u031b"+ + 
"\u00a6\u0001\u0000\u0000\u0000\u031c\u031d\u0003/\u0012\u0000\u031d\u031e"+ + "\u0001\u0000\u0000\u0000\u031e\u031f\u0006N\b\u0000\u031f\u00a8\u0001"+ + "\u0000\u0000\u0000\u0320\u0321\u00031\u0013\u0000\u0321\u0322\u0001\u0000"+ + "\u0000\u0000\u0322\u0323\u0006O\b\u0000\u0323\u00aa\u0001\u0000\u0000"+ + "\u0000\u0324\u0325\u00033\u0014\u0000\u0325\u0326\u0001\u0000\u0000\u0000"+ + "\u0326\u0327\u0006P\b\u0000\u0327\u00ac\u0001\u0000\u0000\u0000\u0328"+ + "\u0329\u0003?\u001a\u0000\u0329\u032a\u0001\u0000\u0000\u0000\u032a\u032b"+ + "\u0006Q\u000b\u0000\u032b\u032c\u0006Q\f\u0000\u032c\u00ae\u0001\u0000"+ + "\u0000\u0000\u032d\u032e\u0003\u009fJ\u0000\u032e\u032f\u0001\u0000\u0000"+ + "\u0000\u032f\u0330\u0006R\t\u0000\u0330\u00b0\u0001\u0000\u0000\u0000"+ + "\u0331\u0332\u0003\u00a1K\u0000\u0332\u0333\u0001\u0000\u0000\u0000\u0333"+ + "\u0334\u0006S\r\u0000\u0334\u00b2\u0001\u0000\u0000\u0000\u0335\u0336"+ + "\u0003c,\u0000\u0336\u0337\u0001\u0000\u0000\u0000\u0337\u0338\u0006T"+ + "\u000e\u0000\u0338\u00b4\u0001\u0000\u0000\u0000\u0339\u033a\u0003a+\u0000"+ + "\u033a\u033b\u0001\u0000\u0000\u0000\u033b\u033c\u0006U\u000f\u0000\u033c"+ + "\u00b6\u0001\u0000\u0000\u0000\u033d\u033e\u0005m\u0000\u0000\u033e\u033f"+ + "\u0005e\u0000\u0000\u033f\u0340\u0005t\u0000\u0000\u0340\u0341\u0005a"+ + "\u0000\u0000\u0341\u0342\u0005d\u0000\u0000\u0342\u0343\u0005a\u0000\u0000"+ + "\u0343\u0344\u0005t\u0000\u0000\u0344\u0345\u0005a\u0000\u0000\u0345\u00b8"+ + "\u0001\u0000\u0000\u0000\u0346\u034a\b\n\u0000\u0000\u0347\u0348\u0005"+ + "/\u0000\u0000\u0348\u034a\b\u000b\u0000\u0000\u0349\u0346\u0001\u0000"+ + "\u0000\u0000\u0349\u0347\u0001\u0000\u0000\u0000\u034a\u00ba\u0001\u0000"+ + "\u0000\u0000\u034b\u034d\u0003\u00b9W\u0000\u034c\u034b\u0001\u0000\u0000"+ + "\u0000\u034d\u034e\u0001\u0000\u0000\u0000\u034e\u034c\u0001\u0000\u0000"+ + "\u0000\u034e\u034f\u0001\u0000\u0000\u0000\u034f\u00bc\u0001\u0000\u0000"+ + "\u0000\u0350\u0351\u0003\u00a5M\u0000\u0351\u0352\u0001\u0000\u0000\u0000"+ + "\u0352\u0353\u0006Y\u0010\u0000\u0353\u00be\u0001\u0000\u0000\u0000\u0354"+ + "\u0355\u0003/\u0012\u0000\u0355\u0356\u0001\u0000\u0000\u0000\u0356\u0357"+ + "\u0006Z\b\u0000\u0357\u00c0\u0001\u0000\u0000\u0000\u0358\u0359\u0003"+ + "1\u0013\u0000\u0359\u035a\u0001\u0000\u0000\u0000\u035a\u035b\u0006[\b"+ + "\u0000\u035b\u00c2\u0001\u0000\u0000\u0000\u035c\u035d\u00033\u0014\u0000"+ + "\u035d\u035e\u0001\u0000\u0000\u0000\u035e\u035f\u0006\\\b\u0000\u035f"+ + "\u00c4\u0001\u0000\u0000\u0000\u0360\u0361\u0003?\u001a\u0000\u0361\u0362"+ + "\u0001\u0000\u0000\u0000\u0362\u0363\u0006]\u000b\u0000\u0363\u0364\u0006"+ + "]\f\u0000\u0364\u00c6\u0001\u0000\u0000\u0000\u0365\u0366\u0003g.\u0000"+ + "\u0366\u0367\u0001\u0000\u0000\u0000\u0367\u0368\u0006^\u0011\u0000\u0368"+ + "\u00c8\u0001\u0000\u0000\u0000\u0369\u036a\u0003c,\u0000\u036a\u036b\u0001"+ + "\u0000\u0000\u0000\u036b\u036c\u0006_\u000e\u0000\u036c\u00ca\u0001\u0000"+ + "\u0000\u0000\u036d\u0372\u0003C\u001c\u0000\u036e\u0372\u0003A\u001b\u0000"+ + "\u036f\u0372\u0003Q#\u0000\u0370\u0372\u0003\u0099G\u0000\u0371\u036d"+ + "\u0001\u0000\u0000\u0000\u0371\u036e\u0001\u0000\u0000\u0000\u0371\u036f"+ + "\u0001\u0000\u0000\u0000\u0371\u0370\u0001\u0000\u0000\u0000\u0372\u00cc"+ + "\u0001\u0000\u0000\u0000\u0373\u0376\u0003C\u001c\u0000\u0374\u0376\u0003"+ + "\u0099G\u0000\u0375\u0373\u0001\u0000\u0000\u0000\u0375\u0374\u0001\u0000"+ + "\u0000\u0000\u0376\u037a\u0001\u0000\u0000\u0000\u0377\u0379\u0003\u00cb"+ + 
"`\u0000\u0378\u0377\u0001\u0000\u0000\u0000\u0379\u037c\u0001\u0000\u0000"+ + "\u0000\u037a\u0378\u0001\u0000\u0000\u0000\u037a\u037b\u0001\u0000\u0000"+ + "\u0000\u037b\u0387\u0001\u0000\u0000\u0000\u037c\u037a\u0001\u0000\u0000"+ + "\u0000\u037d\u0380\u0003Q#\u0000\u037e\u0380\u0003K \u0000\u037f\u037d"+ + "\u0001\u0000\u0000\u0000\u037f\u037e\u0001\u0000\u0000\u0000\u0380\u0382"+ + "\u0001\u0000\u0000\u0000\u0381\u0383\u0003\u00cb`\u0000\u0382\u0381\u0001"+ + "\u0000\u0000\u0000\u0383\u0384\u0001\u0000\u0000\u0000\u0384\u0382\u0001"+ + "\u0000\u0000\u0000\u0384\u0385\u0001\u0000\u0000\u0000\u0385\u0387\u0001"+ + "\u0000\u0000\u0000\u0386\u0375\u0001\u0000\u0000\u0000\u0386\u037f\u0001"+ + "\u0000\u0000\u0000\u0387\u00ce\u0001\u0000\u0000\u0000\u0388\u0389\u0003"+ + "\u00cda\u0000\u0389\u038a\u0001\u0000\u0000\u0000\u038a\u038b\u0006b\u0012"+ + "\u0000\u038b\u00d0\u0001\u0000\u0000\u0000\u038c\u038d\u0003\u00a5M\u0000"+ + "\u038d\u038e\u0001\u0000\u0000\u0000\u038e\u038f\u0006c\u0010\u0000\u038f"+ + "\u00d2\u0001\u0000\u0000\u0000\u0390\u0391\u0003/\u0012\u0000\u0391\u0392"+ + "\u0001\u0000\u0000\u0000\u0392\u0393\u0006d\b\u0000\u0393\u00d4\u0001"+ + "\u0000\u0000\u0000\u0394\u0395\u00031\u0013\u0000\u0395\u0396\u0001\u0000"+ + "\u0000\u0000\u0396\u0397\u0006e\b\u0000\u0397\u00d6\u0001\u0000\u0000"+ + "\u0000\u0398\u0399\u00033\u0014\u0000\u0399\u039a\u0001\u0000\u0000\u0000"+ + "\u039a\u039b\u0006f\b\u0000\u039b\u00d8\u0001\u0000\u0000\u0000\u039c"+ + "\u039d\u0003?\u001a\u0000\u039d\u039e\u0001\u0000\u0000\u0000\u039e\u039f"+ + "\u0006g\u000b\u0000\u039f\u03a0\u0006g\f\u0000\u03a0\u00da\u0001\u0000"+ + "\u0000\u0000\u03a1\u03a2\u0003a+\u0000\u03a2\u03a3\u0001\u0000\u0000\u0000"+ + "\u03a3\u03a4\u0006h\u000f\u0000\u03a4\u00dc\u0001\u0000\u0000\u0000\u03a5"+ + "\u03a6\u0003c,\u0000\u03a6\u03a7\u0001\u0000\u0000\u0000\u03a7\u03a8\u0006"+ + "i\u000e\u0000\u03a8\u00de\u0001\u0000\u0000\u0000\u03a9\u03aa\u0003g."+ + "\u0000\u03aa\u03ab\u0001\u0000\u0000\u0000\u03ab\u03ac\u0006j\u0011\u0000"+ + "\u03ac\u00e0\u0001\u0000\u0000\u0000\u03ad\u03ae\u0005a\u0000\u0000\u03ae"+ + "\u03af\u0005s\u0000\u0000\u03af\u00e2\u0001\u0000\u0000\u0000\u03b0\u03b1"+ + "\u0003\u00a5M\u0000\u03b1\u03b2\u0001\u0000\u0000\u0000\u03b2\u03b3\u0006"+ + "l\u0010\u0000\u03b3\u00e4\u0001\u0000\u0000\u0000\u03b4\u03b5\u0003\u00cd"+ + "a\u0000\u03b5\u03b6\u0001\u0000\u0000\u0000\u03b6\u03b7\u0006m\u0012\u0000"+ + "\u03b7\u00e6\u0001\u0000\u0000\u0000\u03b8\u03b9\u0003/\u0012\u0000\u03b9"+ + "\u03ba\u0001\u0000\u0000\u0000\u03ba\u03bb\u0006n\b\u0000\u03bb\u00e8"+ + "\u0001\u0000\u0000\u0000\u03bc\u03bd\u00031\u0013\u0000\u03bd\u03be\u0001"+ + "\u0000\u0000\u0000\u03be\u03bf\u0006o\b\u0000\u03bf\u00ea\u0001\u0000"+ + "\u0000\u0000\u03c0\u03c1\u00033\u0014\u0000\u03c1\u03c2\u0001\u0000\u0000"+ + "\u0000\u03c2\u03c3\u0006p\b\u0000\u03c3\u00ec\u0001\u0000\u0000\u0000"+ + "\u03c4\u03c5\u0003?\u001a\u0000\u03c5\u03c6\u0001\u0000\u0000\u0000\u03c6"+ + "\u03c7\u0006q\u000b\u0000\u03c7\u03c8\u0006q\f\u0000\u03c8\u00ee\u0001"+ + "\u0000\u0000\u0000\u03c9\u03ca\u0003\u009fJ\u0000\u03ca\u03cb\u0001\u0000"+ + "\u0000\u0000\u03cb\u03cc\u0006r\t\u0000\u03cc\u03cd\u0006r\u0013\u0000"+ + "\u03cd\u00f0\u0001\u0000\u0000\u0000\u03ce\u03cf\u0005o\u0000\u0000\u03cf"+ + "\u03d0\u0005n\u0000\u0000\u03d0\u03d1\u0001\u0000\u0000\u0000\u03d1\u03d2"+ + "\u0006s\u0014\u0000\u03d2\u00f2\u0001\u0000\u0000\u0000\u03d3\u03d4\u0005"+ + "w\u0000\u0000\u03d4\u03d5\u0005i\u0000\u0000\u03d5\u03d6\u0005t\u0000"+ + 
"\u0000\u03d6\u03d7\u0005h\u0000\u0000\u03d7\u03d8\u0001\u0000\u0000\u0000"+ + "\u03d8\u03d9\u0006t\u0014\u0000\u03d9\u00f4\u0001\u0000\u0000\u0000\u03da"+ + "\u03db\b\f\u0000\u0000\u03db\u00f6\u0001\u0000\u0000\u0000\u03dc\u03df"+ + "\u0003C\u001c\u0000\u03dd\u03df\u0003A\u001b\u0000\u03de\u03dc\u0001\u0000"+ + "\u0000\u0000\u03de\u03dd\u0001\u0000\u0000\u0000\u03df\u03e3\u0001\u0000"+ + "\u0000\u0000\u03e0\u03e2\u0003\u00f5u\u0000\u03e1\u03e0\u0001\u0000\u0000"+ + "\u0000\u03e2\u03e5\u0001\u0000\u0000\u0000\u03e3\u03e1\u0001\u0000\u0000"+ + "\u0000\u03e3\u03e4\u0001\u0000\u0000\u0000\u03e4\u00f8\u0001\u0000\u0000"+ + "\u0000\u03e5\u03e3\u0001\u0000\u0000\u0000\u03e6\u03e7\u0003\u00a5M\u0000"+ + "\u03e7\u03e8\u0001\u0000\u0000\u0000\u03e8\u03e9\u0006w\u0010\u0000\u03e9"+ + "\u00fa\u0001\u0000\u0000\u0000\u03ea\u03eb\u0003\u00f7v\u0000\u03eb\u03ec"+ + "\u0001\u0000\u0000\u0000\u03ec\u03ed\u0006x\u0015\u0000\u03ed\u00fc\u0001"+ + "\u0000\u0000\u0000\u03ee\u03ef\u0003/\u0012\u0000\u03ef\u03f0\u0001\u0000"+ + "\u0000\u0000\u03f0\u03f1\u0006y\b\u0000\u03f1\u00fe\u0001\u0000\u0000"+ + "\u0000\u03f2\u03f3\u00031\u0013\u0000\u03f3\u03f4\u0001\u0000\u0000\u0000"+ + "\u03f4\u03f5\u0006z\b\u0000\u03f5\u0100\u0001\u0000\u0000\u0000\u03f6"+ + "\u03f7\u00033\u0014\u0000\u03f7\u03f8\u0001\u0000\u0000\u0000\u03f8\u03f9"+ + "\u0006{\b\u0000\u03f9\u0102\u0001\u0000\u0000\u0000\u03fa\u03fb\u0003"+ + "?\u001a\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc\u03fd\u0006|\u000b"+ + "\u0000\u03fd\u03fe\u0006|\f\u0000\u03fe\u03ff\u0006|\f\u0000\u03ff\u0104"+ + "\u0001\u0000\u0000\u0000\u0400\u0401\u0003a+\u0000\u0401\u0402\u0001\u0000"+ + "\u0000\u0000\u0402\u0403\u0006}\u000f\u0000\u0403\u0106\u0001\u0000\u0000"+ + "\u0000\u0404\u0405\u0003c,\u0000\u0405\u0406\u0001\u0000\u0000\u0000\u0406"+ + "\u0407\u0006~\u000e\u0000\u0407\u0108\u0001\u0000\u0000\u0000\u0408\u0409"+ + "\u0003g.\u0000\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u040b\u0006\u007f"+ + "\u0011\u0000\u040b\u010a\u0001\u0000\u0000\u0000\u040c\u040d\u0003\u00f3"+ + "t\u0000\u040d\u040e\u0001\u0000\u0000\u0000\u040e\u040f\u0006\u0080\u0016"+ + "\u0000\u040f\u010c\u0001\u0000\u0000\u0000\u0410\u0411\u0003\u00cda\u0000"+ + "\u0411\u0412\u0001\u0000\u0000\u0000\u0412\u0413\u0006\u0081\u0012\u0000"+ + "\u0413\u010e\u0001\u0000\u0000\u0000\u0414\u0415\u0003\u00a5M\u0000\u0415"+ + "\u0416\u0001\u0000\u0000\u0000\u0416\u0417\u0006\u0082\u0010\u0000\u0417"+ + "\u0110\u0001\u0000\u0000\u0000\u0418\u0419\u0003/\u0012\u0000\u0419\u041a"+ + "\u0001\u0000\u0000\u0000\u041a\u041b\u0006\u0083\b\u0000\u041b\u0112\u0001"+ + "\u0000\u0000\u0000\u041c\u041d\u00031\u0013\u0000\u041d\u041e\u0001\u0000"+ + "\u0000\u0000\u041e\u041f\u0006\u0084\b\u0000\u041f\u0114\u0001\u0000\u0000"+ + "\u0000\u0420\u0421\u00033\u0014\u0000\u0421\u0422\u0001\u0000\u0000\u0000"+ + "\u0422\u0423\u0006\u0085\b\u0000\u0423\u0116\u0001\u0000\u0000\u0000\u0424"+ + "\u0425\u0003?\u001a\u0000\u0425\u0426\u0001\u0000\u0000\u0000\u0426\u0427"+ + "\u0006\u0086\u000b\u0000\u0427\u0428\u0006\u0086\f\u0000\u0428\u0118\u0001"+ + "\u0000\u0000\u0000\u0429\u042a\u0003g.\u0000\u042a\u042b\u0001\u0000\u0000"+ + "\u0000\u042b\u042c\u0006\u0087\u0011\u0000\u042c\u011a\u0001\u0000\u0000"+ + "\u0000\u042d\u042e\u0003\u00a5M\u0000\u042e\u042f\u0001\u0000\u0000\u0000"+ + "\u042f\u0430\u0006\u0088\u0010\u0000\u0430\u011c\u0001\u0000\u0000\u0000"+ + "\u0431\u0432\u0003\u00a3L\u0000\u0432\u0433\u0001\u0000\u0000\u0000\u0433"+ + "\u0434\u0006\u0089\u0017\u0000\u0434\u011e\u0001\u0000\u0000\u0000\u0435"+ + 
"\u0436\u0003/\u0012\u0000\u0436\u0437\u0001\u0000\u0000\u0000\u0437\u0438"+ + "\u0006\u008a\b\u0000\u0438\u0120\u0001\u0000\u0000\u0000\u0439\u043a\u0003"+ + "1\u0013\u0000\u043a\u043b\u0001\u0000\u0000\u0000\u043b\u043c\u0006\u008b"+ + "\b\u0000\u043c\u0122\u0001\u0000\u0000\u0000\u043d\u043e\u00033\u0014"+ + "\u0000\u043e\u043f\u0001\u0000\u0000\u0000\u043f\u0440\u0006\u008c\b\u0000"+ + "\u0440\u0124\u0001\u0000\u0000\u0000\u0441\u0442\u0003?\u001a\u0000\u0442"+ + "\u0443\u0001\u0000\u0000\u0000\u0443\u0444\u0006\u008d\u000b\u0000\u0444"+ + "\u0445\u0006\u008d\f\u0000\u0445\u0126\u0001\u0000\u0000\u0000\u0446\u0447"+ + "\u0005i\u0000\u0000\u0447\u0448\u0005n\u0000\u0000\u0448\u0449\u0005f"+ + "\u0000\u0000\u0449\u044a\u0005o\u0000\u0000\u044a\u0128\u0001\u0000\u0000"+ + "\u0000\u044b\u044c\u0005f\u0000\u0000\u044c\u044d\u0005u\u0000\u0000\u044d"+ + "\u044e\u0005n\u0000\u0000\u044e\u044f\u0005c\u0000\u0000\u044f\u0450\u0005"+ + "t\u0000\u0000\u0450\u0451\u0005i\u0000\u0000\u0451\u0452\u0005o\u0000"+ + "\u0000\u0452\u0453\u0005n\u0000\u0000\u0453\u0454\u0005s\u0000\u0000\u0454"+ + "\u012a\u0001\u0000\u0000\u0000\u0455\u0456\u0003/\u0012\u0000\u0456\u0457"+ + "\u0001\u0000\u0000\u0000\u0457\u0458\u0006\u0090\b\u0000\u0458\u012c\u0001"+ + "\u0000\u0000\u0000\u0459\u045a\u00031\u0013\u0000\u045a\u045b\u0001\u0000"+ + "\u0000\u0000\u045b\u045c\u0006\u0091\b\u0000\u045c\u012e\u0001\u0000\u0000"+ + "\u0000\u045d\u045e\u00033\u0014\u0000\u045e\u045f\u0001\u0000\u0000\u0000"+ + "\u045f\u0460\u0006\u0092\b\u0000\u0460\u0130\u0001\u0000\u0000\u0000\u0461"+ + "\u0462\u0003\u00a1K\u0000\u0462\u0463\u0001\u0000\u0000\u0000\u0463\u0464"+ + "\u0006\u0093\r\u0000\u0464\u0465\u0006\u0093\f\u0000\u0465\u0132\u0001"+ + "\u0000\u0000\u0000\u0466\u0467\u0005:\u0000\u0000\u0467\u0134\u0001\u0000"+ + "\u0000\u0000\u0468\u046e\u0003K \u0000\u0469\u046e\u0003A\u001b\u0000"+ + "\u046a\u046e\u0003g.\u0000\u046b\u046e\u0003C\u001c\u0000\u046c\u046e"+ + "\u0003Q#\u0000\u046d\u0468\u0001\u0000\u0000\u0000\u046d\u0469\u0001\u0000"+ + "\u0000\u0000\u046d\u046a\u0001\u0000\u0000\u0000\u046d\u046b\u0001\u0000"+ + "\u0000\u0000\u046d\u046c\u0001\u0000\u0000\u0000\u046e\u046f\u0001\u0000"+ + "\u0000\u0000\u046f\u046d\u0001\u0000\u0000\u0000\u046f\u0470\u0001\u0000"+ + "\u0000\u0000\u0470\u0136\u0001\u0000\u0000\u0000\u0471\u0472\u0003/\u0012"+ + "\u0000\u0472\u0473\u0001\u0000\u0000\u0000\u0473\u0474\u0006\u0096\b\u0000"+ + "\u0474\u0138\u0001\u0000\u0000\u0000\u0475\u0476\u00031\u0013\u0000\u0476"+ + "\u0477\u0001\u0000\u0000\u0000\u0477\u0478\u0006\u0097\b\u0000\u0478\u013a"+ + "\u0001\u0000\u0000\u0000\u0479\u047a\u00033\u0014\u0000\u047a\u047b\u0001"+ + "\u0000\u0000\u0000\u047b\u047c\u0006\u0098\b\u0000\u047c\u013c\u0001\u0000"+ + "\u0000\u00006\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u01cf"+ + "\u01d9\u01dd\u01e0\u01e9\u01eb\u01f6\u021f\u0224\u022d\u0234\u0239\u023b"+ + "\u0246\u024e\u0251\u0253\u0258\u025d\u0263\u026a\u026f\u0275\u0278\u0280"+ + "\u0284\u0306\u030b\u0310\u0312\u0318\u0349\u034e\u0371\u0375\u037a\u037f"+ + "\u0384\u0386\u03de\u03e3\u046d\u046f\u0018\u0005\u0002\u0000\u0005\u0004"+ + "\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000\u0005\b\u0000"+ + "\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001\u0000\u0007?\u0000\u0005"+ + "\u0000\u0000\u0007\u0019\u0000\u0004\u0000\u0000\u0007@\u0000\u0007!\u0000"+ + "\u0007 \u0000\u0007B\u0000\u0007#\u0000\u0007K\u0000\u0005\n\u0000\u0005"+ + "\u0007\u0000\u0007U\u0000\u0007T\u0000\u0007A\u0000"; public static final ATN _ATN = new 
ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 424662cd9626f..8a0a7cd0bf46e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -11,7 +11,6 @@ null 'keep' 'limit' 'mv_expand' -'project' 'rename' 'row' 'show' @@ -119,7 +118,6 @@ INLINESTATS KEEP LIMIT MV_EXPAND -PROJECT RENAME ROW SHOW @@ -267,4 +265,4 @@ setting atn: -[4, 1, 105, 523, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 108, 8, 1, 10, 1, 12, 1, 111, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 117, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 132, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 144, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 151, 8, 5, 10, 5, 12, 5, 154, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 161, 8, 5, 1, 5, 1, 5, 3, 5, 165, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 173, 8, 5, 10, 5, 12, 5, 176, 9, 5, 1, 6, 1, 6, 3, 6, 180, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 187, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 192, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 199, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 205, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 213, 8, 8, 10, 8, 12, 8, 216, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 225, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 233, 8, 10, 10, 10, 12, 10, 236, 9, 10, 3, 10, 238, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 248, 8, 12, 10, 12, 12, 12, 251, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 258, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 264, 8, 14, 10, 14, 12, 14, 267, 9, 14, 1, 14, 3, 14, 270, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 277, 8, 15, 10, 15, 12, 15, 280, 9, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 3, 17, 289, 8, 17, 1, 17, 1, 17, 3, 17, 293, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 299, 8, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 5, 20, 306, 8, 20, 10, 20, 12, 20, 309, 9, 20, 1, 21, 1, 21, 1, 21, 5, 21, 314, 8, 21, 10, 21, 12, 21, 317, 9, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 336, 8, 24, 10, 24, 12, 24, 339, 9, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 347, 8, 24, 10, 24, 12, 24, 350, 9, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 358, 8, 24, 10, 24, 12, 24, 361, 9, 24, 1, 24, 1, 24, 3, 24, 365, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 374, 8, 26, 10, 26, 12, 26, 377, 9, 26, 1, 27, 1, 27, 3, 27, 381, 8, 27, 1, 27, 1, 27, 3, 27, 385, 8, 27, 1, 28, 1, 28, 1, 
28, 1, 28, 5, 28, 391, 8, 28, 10, 28, 12, 28, 394, 9, 28, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 400, 8, 28, 10, 28, 12, 28, 403, 9, 28, 3, 28, 405, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 411, 8, 29, 10, 29, 12, 29, 414, 9, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 420, 8, 30, 10, 30, 12, 30, 423, 9, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 433, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 5, 35, 445, 8, 35, 10, 35, 12, 35, 448, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 38, 1, 38, 3, 38, 458, 8, 38, 1, 39, 3, 39, 461, 8, 39, 1, 39, 1, 39, 1, 40, 3, 40, 466, 8, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 3, 45, 485, 8, 45, 1, 46, 1, 46, 5, 46, 489, 8, 46, 10, 46, 12, 46, 492, 9, 46, 1, 46, 1, 46, 1, 46, 3, 46, 497, 8, 46, 1, 46, 1, 46, 1, 46, 1, 46, 5, 46, 503, 8, 46, 10, 46, 12, 46, 506, 9, 46, 3, 46, 508, 8, 46, 1, 47, 1, 47, 1, 47, 3, 47, 513, 8, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 0, 3, 2, 10, 16, 49, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 0, 9, 1, 0, 59, 60, 1, 0, 61, 63, 2, 0, 67, 67, 72, 72, 1, 0, 66, 67, 2, 0, 67, 67, 76, 76, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 1, 0, 52, 58, 552, 0, 98, 1, 0, 0, 0, 2, 101, 1, 0, 0, 0, 4, 116, 1, 0, 0, 0, 6, 131, 1, 0, 0, 0, 8, 133, 1, 0, 0, 0, 10, 164, 1, 0, 0, 0, 12, 191, 1, 0, 0, 0, 14, 198, 1, 0, 0, 0, 16, 204, 1, 0, 0, 0, 18, 224, 1, 0, 0, 0, 20, 226, 1, 0, 0, 0, 22, 241, 1, 0, 0, 0, 24, 244, 1, 0, 0, 0, 26, 257, 1, 0, 0, 0, 28, 259, 1, 0, 0, 0, 30, 271, 1, 0, 0, 0, 32, 283, 1, 0, 0, 0, 34, 286, 1, 0, 0, 0, 36, 294, 1, 0, 0, 0, 38, 300, 1, 0, 0, 0, 40, 302, 1, 0, 0, 0, 42, 310, 1, 0, 0, 0, 44, 318, 1, 0, 0, 0, 46, 320, 1, 0, 0, 0, 48, 364, 1, 0, 0, 0, 50, 366, 1, 0, 0, 0, 52, 369, 1, 0, 0, 0, 54, 378, 1, 0, 0, 0, 56, 404, 1, 0, 0, 0, 58, 406, 1, 0, 0, 0, 60, 415, 1, 0, 0, 0, 62, 424, 1, 0, 0, 0, 64, 428, 1, 0, 0, 0, 66, 434, 1, 0, 0, 0, 68, 438, 1, 0, 0, 0, 70, 441, 1, 0, 0, 0, 72, 449, 1, 0, 0, 0, 74, 453, 1, 0, 0, 0, 76, 457, 1, 0, 0, 0, 78, 460, 1, 0, 0, 0, 80, 465, 1, 0, 0, 0, 82, 469, 1, 0, 0, 0, 84, 471, 1, 0, 0, 0, 86, 473, 1, 0, 0, 0, 88, 476, 1, 0, 0, 0, 90, 484, 1, 0, 0, 0, 92, 486, 1, 0, 0, 0, 94, 512, 1, 0, 0, 0, 96, 516, 1, 0, 0, 0, 98, 99, 3, 2, 1, 0, 99, 100, 5, 0, 0, 1, 100, 1, 1, 0, 0, 0, 101, 102, 6, 1, -1, 0, 102, 103, 3, 4, 2, 0, 103, 109, 1, 0, 0, 0, 104, 105, 10, 1, 0, 0, 105, 106, 5, 26, 0, 0, 106, 108, 3, 6, 3, 0, 107, 104, 1, 0, 0, 0, 108, 111, 1, 0, 0, 0, 109, 107, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 3, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 112, 117, 3, 86, 43, 0, 113, 117, 3, 28, 14, 0, 114, 117, 3, 22, 11, 0, 115, 117, 3, 90, 45, 0, 116, 112, 1, 0, 0, 0, 116, 113, 1, 0, 0, 0, 116, 114, 1, 0, 0, 0, 116, 115, 1, 0, 0, 0, 117, 5, 1, 0, 0, 0, 118, 132, 3, 32, 16, 0, 119, 132, 3, 36, 18, 0, 120, 132, 3, 50, 25, 0, 121, 132, 3, 56, 28, 0, 122, 132, 3, 52, 26, 0, 123, 132, 3, 34, 17, 0, 124, 132, 3, 8, 4, 0, 125, 132, 3, 58, 29, 0, 126, 132, 3, 60, 30, 0, 127, 132, 3, 64, 32, 0, 128, 132, 3, 66, 33, 0, 129, 132, 3, 92, 46, 0, 130, 132, 3, 68, 34, 0, 131, 118, 1, 0, 0, 0, 131, 119, 1, 0, 0, 0, 131, 120, 1, 0, 0, 0, 131, 121, 1, 0, 0, 0, 131, 122, 1, 0, 0, 0, 131, 123, 1, 0, 0, 0, 131, 124, 1, 0, 0, 0, 131, 125, 1, 0, 0, 0, 131, 126, 1, 0, 0, 0, 131, 127, 1, 0, 0, 0, 131, 128, 1, 0, 0, 
0, 131, 129, 1, 0, 0, 0, 131, 130, 1, 0, 0, 0, 132, 7, 1, 0, 0, 0, 133, 134, 5, 18, 0, 0, 134, 135, 3, 10, 5, 0, 135, 9, 1, 0, 0, 0, 136, 137, 6, 5, -1, 0, 137, 138, 5, 44, 0, 0, 138, 165, 3, 10, 5, 7, 139, 165, 3, 14, 7, 0, 140, 165, 3, 12, 6, 0, 141, 143, 3, 14, 7, 0, 142, 144, 5, 44, 0, 0, 143, 142, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 145, 1, 0, 0, 0, 145, 146, 5, 41, 0, 0, 146, 147, 5, 40, 0, 0, 147, 152, 3, 14, 7, 0, 148, 149, 5, 34, 0, 0, 149, 151, 3, 14, 7, 0, 150, 148, 1, 0, 0, 0, 151, 154, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 152, 153, 1, 0, 0, 0, 153, 155, 1, 0, 0, 0, 154, 152, 1, 0, 0, 0, 155, 156, 5, 50, 0, 0, 156, 165, 1, 0, 0, 0, 157, 158, 3, 14, 7, 0, 158, 160, 5, 42, 0, 0, 159, 161, 5, 44, 0, 0, 160, 159, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 163, 5, 45, 0, 0, 163, 165, 1, 0, 0, 0, 164, 136, 1, 0, 0, 0, 164, 139, 1, 0, 0, 0, 164, 140, 1, 0, 0, 0, 164, 141, 1, 0, 0, 0, 164, 157, 1, 0, 0, 0, 165, 174, 1, 0, 0, 0, 166, 167, 10, 4, 0, 0, 167, 168, 5, 31, 0, 0, 168, 173, 3, 10, 5, 5, 169, 170, 10, 3, 0, 0, 170, 171, 5, 47, 0, 0, 171, 173, 3, 10, 5, 4, 172, 166, 1, 0, 0, 0, 172, 169, 1, 0, 0, 0, 173, 176, 1, 0, 0, 0, 174, 172, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 11, 1, 0, 0, 0, 176, 174, 1, 0, 0, 0, 177, 179, 3, 14, 7, 0, 178, 180, 5, 44, 0, 0, 179, 178, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 182, 5, 43, 0, 0, 182, 183, 3, 82, 41, 0, 183, 192, 1, 0, 0, 0, 184, 186, 3, 14, 7, 0, 185, 187, 5, 44, 0, 0, 186, 185, 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 189, 5, 49, 0, 0, 189, 190, 3, 82, 41, 0, 190, 192, 1, 0, 0, 0, 191, 177, 1, 0, 0, 0, 191, 184, 1, 0, 0, 0, 192, 13, 1, 0, 0, 0, 193, 199, 3, 16, 8, 0, 194, 195, 3, 16, 8, 0, 195, 196, 3, 84, 42, 0, 196, 197, 3, 16, 8, 0, 197, 199, 1, 0, 0, 0, 198, 193, 1, 0, 0, 0, 198, 194, 1, 0, 0, 0, 199, 15, 1, 0, 0, 0, 200, 201, 6, 8, -1, 0, 201, 205, 3, 18, 9, 0, 202, 203, 7, 0, 0, 0, 203, 205, 3, 16, 8, 3, 204, 200, 1, 0, 0, 0, 204, 202, 1, 0, 0, 0, 205, 214, 1, 0, 0, 0, 206, 207, 10, 2, 0, 0, 207, 208, 7, 1, 0, 0, 208, 213, 3, 16, 8, 3, 209, 210, 10, 1, 0, 0, 210, 211, 7, 0, 0, 0, 211, 213, 3, 16, 8, 2, 212, 206, 1, 0, 0, 0, 212, 209, 1, 0, 0, 0, 213, 216, 1, 0, 0, 0, 214, 212, 1, 0, 0, 0, 214, 215, 1, 0, 0, 0, 215, 17, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 217, 225, 3, 48, 24, 0, 218, 225, 3, 40, 20, 0, 219, 225, 3, 20, 10, 0, 220, 221, 5, 40, 0, 0, 221, 222, 3, 10, 5, 0, 222, 223, 5, 50, 0, 0, 223, 225, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 224, 218, 1, 0, 0, 0, 224, 219, 1, 0, 0, 0, 224, 220, 1, 0, 0, 0, 225, 19, 1, 0, 0, 0, 226, 227, 3, 44, 22, 0, 227, 237, 5, 40, 0, 0, 228, 238, 5, 61, 0, 0, 229, 234, 3, 10, 5, 0, 230, 231, 5, 34, 0, 0, 231, 233, 3, 10, 5, 0, 232, 230, 1, 0, 0, 0, 233, 236, 1, 0, 0, 0, 234, 232, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, 237, 228, 1, 0, 0, 0, 237, 229, 1, 0, 0, 0, 237, 238, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 5, 50, 0, 0, 240, 21, 1, 0, 0, 0, 241, 242, 5, 14, 0, 0, 242, 243, 3, 24, 12, 0, 243, 23, 1, 0, 0, 0, 244, 249, 3, 26, 13, 0, 245, 246, 5, 34, 0, 0, 246, 248, 3, 26, 13, 0, 247, 245, 1, 0, 0, 0, 248, 251, 1, 0, 0, 0, 249, 247, 1, 0, 0, 0, 249, 250, 1, 0, 0, 0, 250, 25, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 252, 258, 3, 10, 5, 0, 253, 254, 3, 40, 20, 0, 254, 255, 5, 33, 0, 0, 255, 256, 3, 10, 5, 0, 256, 258, 1, 0, 0, 0, 257, 252, 1, 0, 0, 0, 257, 253, 1, 0, 0, 0, 258, 27, 1, 0, 0, 0, 259, 260, 5, 6, 0, 0, 260, 265, 3, 38, 19, 0, 261, 262, 5, 34, 0, 0, 262, 264, 3, 38, 19, 0, 263, 261, 1, 0, 0, 0, 264, 
267, 1, 0, 0, 0, 265, 263, 1, 0, 0, 0, 265, 266, 1, 0, 0, 0, 266, 269, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 268, 270, 3, 30, 15, 0, 269, 268, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 29, 1, 0, 0, 0, 271, 272, 5, 64, 0, 0, 272, 273, 5, 71, 0, 0, 273, 278, 3, 38, 19, 0, 274, 275, 5, 34, 0, 0, 275, 277, 3, 38, 19, 0, 276, 274, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 281, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 281, 282, 5, 65, 0, 0, 282, 31, 1, 0, 0, 0, 283, 284, 5, 4, 0, 0, 284, 285, 3, 24, 12, 0, 285, 33, 1, 0, 0, 0, 286, 288, 5, 17, 0, 0, 287, 289, 3, 24, 12, 0, 288, 287, 1, 0, 0, 0, 288, 289, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 291, 5, 30, 0, 0, 291, 293, 3, 24, 12, 0, 292, 290, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 35, 1, 0, 0, 0, 294, 295, 5, 8, 0, 0, 295, 298, 3, 24, 12, 0, 296, 297, 5, 30, 0, 0, 297, 299, 3, 24, 12, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 37, 1, 0, 0, 0, 300, 301, 7, 2, 0, 0, 301, 39, 1, 0, 0, 0, 302, 307, 3, 44, 22, 0, 303, 304, 5, 36, 0, 0, 304, 306, 3, 44, 22, 0, 305, 303, 1, 0, 0, 0, 306, 309, 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 41, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 310, 315, 3, 46, 23, 0, 311, 312, 5, 36, 0, 0, 312, 314, 3, 46, 23, 0, 313, 311, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 43, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 319, 7, 3, 0, 0, 319, 45, 1, 0, 0, 0, 320, 321, 7, 4, 0, 0, 321, 47, 1, 0, 0, 0, 322, 365, 5, 45, 0, 0, 323, 324, 3, 80, 40, 0, 324, 325, 5, 66, 0, 0, 325, 365, 1, 0, 0, 0, 326, 365, 3, 78, 39, 0, 327, 365, 3, 80, 40, 0, 328, 365, 3, 74, 37, 0, 329, 365, 5, 48, 0, 0, 330, 365, 3, 82, 41, 0, 331, 332, 5, 64, 0, 0, 332, 337, 3, 76, 38, 0, 333, 334, 5, 34, 0, 0, 334, 336, 3, 76, 38, 0, 335, 333, 1, 0, 0, 0, 336, 339, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 340, 1, 0, 0, 0, 339, 337, 1, 0, 0, 0, 340, 341, 5, 65, 0, 0, 341, 365, 1, 0, 0, 0, 342, 343, 5, 64, 0, 0, 343, 348, 3, 74, 37, 0, 344, 345, 5, 34, 0, 0, 345, 347, 3, 74, 37, 0, 346, 344, 1, 0, 0, 0, 347, 350, 1, 0, 0, 0, 348, 346, 1, 0, 0, 0, 348, 349, 1, 0, 0, 0, 349, 351, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351, 352, 5, 65, 0, 0, 352, 365, 1, 0, 0, 0, 353, 354, 5, 64, 0, 0, 354, 359, 3, 82, 41, 0, 355, 356, 5, 34, 0, 0, 356, 358, 3, 82, 41, 0, 357, 355, 1, 0, 0, 0, 358, 361, 1, 0, 0, 0, 359, 357, 1, 0, 0, 0, 359, 360, 1, 0, 0, 0, 360, 362, 1, 0, 0, 0, 361, 359, 1, 0, 0, 0, 362, 363, 5, 65, 0, 0, 363, 365, 1, 0, 0, 0, 364, 322, 1, 0, 0, 0, 364, 323, 1, 0, 0, 0, 364, 326, 1, 0, 0, 0, 364, 327, 1, 0, 0, 0, 364, 328, 1, 0, 0, 0, 364, 329, 1, 0, 0, 0, 364, 330, 1, 0, 0, 0, 364, 331, 1, 0, 0, 0, 364, 342, 1, 0, 0, 0, 364, 353, 1, 0, 0, 0, 365, 49, 1, 0, 0, 0, 366, 367, 5, 10, 0, 0, 367, 368, 5, 28, 0, 0, 368, 51, 1, 0, 0, 0, 369, 370, 5, 16, 0, 0, 370, 375, 3, 54, 27, 0, 371, 372, 5, 34, 0, 0, 372, 374, 3, 54, 27, 0, 373, 371, 1, 0, 0, 0, 374, 377, 1, 0, 0, 0, 375, 373, 1, 0, 0, 0, 375, 376, 1, 0, 0, 0, 376, 53, 1, 0, 0, 0, 377, 375, 1, 0, 0, 0, 378, 380, 3, 10, 5, 0, 379, 381, 7, 5, 0, 0, 380, 379, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 383, 5, 46, 0, 0, 383, 385, 7, 6, 0, 0, 384, 382, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 55, 1, 0, 0, 0, 386, 387, 5, 9, 0, 0, 387, 392, 3, 42, 21, 0, 388, 389, 5, 34, 0, 0, 389, 391, 3, 42, 21, 0, 390, 388, 1, 0, 0, 0, 391, 394, 1, 0, 0, 0, 392, 390, 1, 0, 0, 0, 392, 393, 1, 0, 0, 0, 393, 405, 1, 0, 0, 0, 394, 392, 1, 0, 0, 0, 395, 396, 5, 12, 0, 0, 396, 401, 3, 42, 21, 0, 397, 398, 5, 34, 0, 0, 398, 400, 
3, 42, 21, 0, 399, 397, 1, 0, 0, 0, 400, 403, 1, 0, 0, 0, 401, 399, 1, 0, 0, 0, 401, 402, 1, 0, 0, 0, 402, 405, 1, 0, 0, 0, 403, 401, 1, 0, 0, 0, 404, 386, 1, 0, 0, 0, 404, 395, 1, 0, 0, 0, 405, 57, 1, 0, 0, 0, 406, 407, 5, 2, 0, 0, 407, 412, 3, 42, 21, 0, 408, 409, 5, 34, 0, 0, 409, 411, 3, 42, 21, 0, 410, 408, 1, 0, 0, 0, 411, 414, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 59, 1, 0, 0, 0, 414, 412, 1, 0, 0, 0, 415, 416, 5, 13, 0, 0, 416, 421, 3, 62, 31, 0, 417, 418, 5, 34, 0, 0, 418, 420, 3, 62, 31, 0, 419, 417, 1, 0, 0, 0, 420, 423, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 421, 422, 1, 0, 0, 0, 422, 61, 1, 0, 0, 0, 423, 421, 1, 0, 0, 0, 424, 425, 3, 42, 21, 0, 425, 426, 5, 80, 0, 0, 426, 427, 3, 42, 21, 0, 427, 63, 1, 0, 0, 0, 428, 429, 5, 1, 0, 0, 429, 430, 3, 18, 9, 0, 430, 432, 3, 82, 41, 0, 431, 433, 3, 70, 35, 0, 432, 431, 1, 0, 0, 0, 432, 433, 1, 0, 0, 0, 433, 65, 1, 0, 0, 0, 434, 435, 5, 7, 0, 0, 435, 436, 3, 18, 9, 0, 436, 437, 3, 82, 41, 0, 437, 67, 1, 0, 0, 0, 438, 439, 5, 11, 0, 0, 439, 440, 3, 40, 20, 0, 440, 69, 1, 0, 0, 0, 441, 446, 3, 72, 36, 0, 442, 443, 5, 34, 0, 0, 443, 445, 3, 72, 36, 0, 444, 442, 1, 0, 0, 0, 445, 448, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 446, 447, 1, 0, 0, 0, 447, 71, 1, 0, 0, 0, 448, 446, 1, 0, 0, 0, 449, 450, 3, 44, 22, 0, 450, 451, 5, 33, 0, 0, 451, 452, 3, 48, 24, 0, 452, 73, 1, 0, 0, 0, 453, 454, 7, 7, 0, 0, 454, 75, 1, 0, 0, 0, 455, 458, 3, 78, 39, 0, 456, 458, 3, 80, 40, 0, 457, 455, 1, 0, 0, 0, 457, 456, 1, 0, 0, 0, 458, 77, 1, 0, 0, 0, 459, 461, 7, 0, 0, 0, 460, 459, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 462, 1, 0, 0, 0, 462, 463, 5, 29, 0, 0, 463, 79, 1, 0, 0, 0, 464, 466, 7, 0, 0, 0, 465, 464, 1, 0, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 1, 0, 0, 0, 467, 468, 5, 28, 0, 0, 468, 81, 1, 0, 0, 0, 469, 470, 5, 27, 0, 0, 470, 83, 1, 0, 0, 0, 471, 472, 7, 8, 0, 0, 472, 85, 1, 0, 0, 0, 473, 474, 5, 5, 0, 0, 474, 475, 3, 88, 44, 0, 475, 87, 1, 0, 0, 0, 476, 477, 5, 64, 0, 0, 477, 478, 3, 2, 1, 0, 478, 479, 5, 65, 0, 0, 479, 89, 1, 0, 0, 0, 480, 481, 5, 15, 0, 0, 481, 485, 5, 96, 0, 0, 482, 483, 5, 15, 0, 0, 483, 485, 5, 97, 0, 0, 484, 480, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 485, 91, 1, 0, 0, 0, 486, 490, 5, 3, 0, 0, 487, 489, 3, 96, 48, 0, 488, 487, 1, 0, 0, 0, 489, 492, 1, 0, 0, 0, 490, 488, 1, 0, 0, 0, 490, 491, 1, 0, 0, 0, 491, 493, 1, 0, 0, 0, 492, 490, 1, 0, 0, 0, 493, 496, 5, 86, 0, 0, 494, 495, 5, 84, 0, 0, 495, 497, 3, 42, 21, 0, 496, 494, 1, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 507, 1, 0, 0, 0, 498, 499, 5, 85, 0, 0, 499, 504, 3, 94, 47, 0, 500, 501, 5, 34, 0, 0, 501, 503, 3, 94, 47, 0, 502, 500, 1, 0, 0, 0, 503, 506, 1, 0, 0, 0, 504, 502, 1, 0, 0, 0, 504, 505, 1, 0, 0, 0, 505, 508, 1, 0, 0, 0, 506, 504, 1, 0, 0, 0, 507, 498, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 93, 1, 0, 0, 0, 509, 510, 3, 42, 21, 0, 510, 511, 5, 33, 0, 0, 511, 513, 1, 0, 0, 0, 512, 509, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 3, 42, 21, 0, 515, 95, 1, 0, 0, 0, 516, 517, 5, 64, 0, 0, 517, 518, 5, 102, 0, 0, 518, 519, 5, 101, 0, 0, 519, 520, 5, 102, 0, 0, 520, 521, 5, 65, 0, 0, 521, 97, 1, 0, 0, 0, 52, 109, 116, 131, 143, 152, 160, 164, 172, 174, 179, 186, 191, 198, 204, 212, 214, 224, 234, 237, 249, 257, 265, 269, 278, 288, 292, 298, 307, 315, 337, 348, 359, 364, 375, 380, 384, 392, 401, 404, 412, 421, 432, 446, 457, 460, 465, 484, 490, 496, 504, 507, 512] \ No newline at end of file +[4, 1, 104, 512, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 
11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 108, 8, 1, 10, 1, 12, 1, 111, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 117, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 132, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 144, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 151, 8, 5, 10, 5, 12, 5, 154, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 161, 8, 5, 1, 5, 1, 5, 3, 5, 165, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 173, 8, 5, 10, 5, 12, 5, 176, 9, 5, 1, 6, 1, 6, 3, 6, 180, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 187, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 192, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 199, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 205, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 213, 8, 8, 10, 8, 12, 8, 216, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 225, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 233, 8, 10, 10, 10, 12, 10, 236, 9, 10, 3, 10, 238, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 248, 8, 12, 10, 12, 12, 12, 251, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 258, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 264, 8, 14, 10, 14, 12, 14, 267, 9, 14, 1, 14, 3, 14, 270, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 277, 8, 15, 10, 15, 12, 15, 280, 9, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 3, 17, 289, 8, 17, 1, 17, 1, 17, 3, 17, 293, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 299, 8, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 5, 20, 306, 8, 20, 10, 20, 12, 20, 309, 9, 20, 1, 21, 1, 21, 1, 21, 5, 21, 314, 8, 21, 10, 21, 12, 21, 317, 9, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 336, 8, 24, 10, 24, 12, 24, 339, 9, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 347, 8, 24, 10, 24, 12, 24, 350, 9, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 358, 8, 24, 10, 24, 12, 24, 361, 9, 24, 1, 24, 1, 24, 3, 24, 365, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 374, 8, 26, 10, 26, 12, 26, 377, 9, 26, 1, 27, 1, 27, 3, 27, 381, 8, 27, 1, 27, 1, 27, 3, 27, 385, 8, 27, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 391, 8, 28, 10, 28, 12, 28, 394, 9, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 400, 8, 29, 10, 29, 12, 29, 403, 9, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 409, 8, 30, 10, 30, 12, 30, 412, 9, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 32, 3, 32, 422, 8, 32, 1, 33, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 5, 35, 434, 8, 35, 10, 35, 12, 35, 437, 9, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 38, 1, 38, 3, 38, 447, 8, 38, 1, 39, 3, 39, 450, 8, 39, 1, 39, 1, 39, 1, 40, 3, 40, 455, 8, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 3, 45, 474, 8, 45, 1, 46, 1, 46, 5, 46, 478, 8, 46, 10, 46, 12, 46, 481, 9, 46, 1, 46, 1, 46, 1, 46, 3, 46, 486, 8, 46, 1, 46, 1, 46, 1, 46, 1, 46, 5, 46, 492, 8, 46, 10, 46, 12, 46, 495, 9, 
46, 3, 46, 497, 8, 46, 1, 47, 1, 47, 1, 47, 3, 47, 502, 8, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 0, 3, 2, 10, 16, 49, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 0, 9, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 66, 66, 71, 71, 1, 0, 65, 66, 2, 0, 66, 66, 75, 75, 2, 0, 31, 31, 34, 34, 1, 0, 37, 38, 2, 0, 36, 36, 50, 50, 1, 0, 51, 57, 539, 0, 98, 1, 0, 0, 0, 2, 101, 1, 0, 0, 0, 4, 116, 1, 0, 0, 0, 6, 131, 1, 0, 0, 0, 8, 133, 1, 0, 0, 0, 10, 164, 1, 0, 0, 0, 12, 191, 1, 0, 0, 0, 14, 198, 1, 0, 0, 0, 16, 204, 1, 0, 0, 0, 18, 224, 1, 0, 0, 0, 20, 226, 1, 0, 0, 0, 22, 241, 1, 0, 0, 0, 24, 244, 1, 0, 0, 0, 26, 257, 1, 0, 0, 0, 28, 259, 1, 0, 0, 0, 30, 271, 1, 0, 0, 0, 32, 283, 1, 0, 0, 0, 34, 286, 1, 0, 0, 0, 36, 294, 1, 0, 0, 0, 38, 300, 1, 0, 0, 0, 40, 302, 1, 0, 0, 0, 42, 310, 1, 0, 0, 0, 44, 318, 1, 0, 0, 0, 46, 320, 1, 0, 0, 0, 48, 364, 1, 0, 0, 0, 50, 366, 1, 0, 0, 0, 52, 369, 1, 0, 0, 0, 54, 378, 1, 0, 0, 0, 56, 386, 1, 0, 0, 0, 58, 395, 1, 0, 0, 0, 60, 404, 1, 0, 0, 0, 62, 413, 1, 0, 0, 0, 64, 417, 1, 0, 0, 0, 66, 423, 1, 0, 0, 0, 68, 427, 1, 0, 0, 0, 70, 430, 1, 0, 0, 0, 72, 438, 1, 0, 0, 0, 74, 442, 1, 0, 0, 0, 76, 446, 1, 0, 0, 0, 78, 449, 1, 0, 0, 0, 80, 454, 1, 0, 0, 0, 82, 458, 1, 0, 0, 0, 84, 460, 1, 0, 0, 0, 86, 462, 1, 0, 0, 0, 88, 465, 1, 0, 0, 0, 90, 473, 1, 0, 0, 0, 92, 475, 1, 0, 0, 0, 94, 501, 1, 0, 0, 0, 96, 505, 1, 0, 0, 0, 98, 99, 3, 2, 1, 0, 99, 100, 5, 0, 0, 1, 100, 1, 1, 0, 0, 0, 101, 102, 6, 1, -1, 0, 102, 103, 3, 4, 2, 0, 103, 109, 1, 0, 0, 0, 104, 105, 10, 1, 0, 0, 105, 106, 5, 25, 0, 0, 106, 108, 3, 6, 3, 0, 107, 104, 1, 0, 0, 0, 108, 111, 1, 0, 0, 0, 109, 107, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 3, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 112, 117, 3, 86, 43, 0, 113, 117, 3, 28, 14, 0, 114, 117, 3, 22, 11, 0, 115, 117, 3, 90, 45, 0, 116, 112, 1, 0, 0, 0, 116, 113, 1, 0, 0, 0, 116, 114, 1, 0, 0, 0, 116, 115, 1, 0, 0, 0, 117, 5, 1, 0, 0, 0, 118, 132, 3, 32, 16, 0, 119, 132, 3, 36, 18, 0, 120, 132, 3, 50, 25, 0, 121, 132, 3, 56, 28, 0, 122, 132, 3, 52, 26, 0, 123, 132, 3, 34, 17, 0, 124, 132, 3, 8, 4, 0, 125, 132, 3, 58, 29, 0, 126, 132, 3, 60, 30, 0, 127, 132, 3, 64, 32, 0, 128, 132, 3, 66, 33, 0, 129, 132, 3, 92, 46, 0, 130, 132, 3, 68, 34, 0, 131, 118, 1, 0, 0, 0, 131, 119, 1, 0, 0, 0, 131, 120, 1, 0, 0, 0, 131, 121, 1, 0, 0, 0, 131, 122, 1, 0, 0, 0, 131, 123, 1, 0, 0, 0, 131, 124, 1, 0, 0, 0, 131, 125, 1, 0, 0, 0, 131, 126, 1, 0, 0, 0, 131, 127, 1, 0, 0, 0, 131, 128, 1, 0, 0, 0, 131, 129, 1, 0, 0, 0, 131, 130, 1, 0, 0, 0, 132, 7, 1, 0, 0, 0, 133, 134, 5, 17, 0, 0, 134, 135, 3, 10, 5, 0, 135, 9, 1, 0, 0, 0, 136, 137, 6, 5, -1, 0, 137, 138, 5, 43, 0, 0, 138, 165, 3, 10, 5, 7, 139, 165, 3, 14, 7, 0, 140, 165, 3, 12, 6, 0, 141, 143, 3, 14, 7, 0, 142, 144, 5, 43, 0, 0, 143, 142, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 145, 1, 0, 0, 0, 145, 146, 5, 40, 0, 0, 146, 147, 5, 39, 0, 0, 147, 152, 3, 14, 7, 0, 148, 149, 5, 33, 0, 0, 149, 151, 3, 14, 7, 0, 150, 148, 1, 0, 0, 0, 151, 154, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 152, 153, 1, 0, 0, 0, 153, 155, 1, 0, 0, 0, 154, 152, 1, 0, 0, 0, 155, 156, 5, 49, 0, 0, 156, 165, 1, 0, 0, 0, 157, 158, 3, 14, 7, 0, 158, 160, 5, 41, 0, 0, 159, 161, 5, 43, 0, 0, 160, 159, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 163, 5, 44, 0, 0, 163, 165, 1, 0, 0, 0, 164, 136, 1, 0, 0, 0, 164, 139, 1, 0, 0, 0, 164, 140, 1, 0, 0, 0, 164, 141, 1, 0, 0, 0, 164, 157, 1, 0, 0, 0, 
165, 174, 1, 0, 0, 0, 166, 167, 10, 4, 0, 0, 167, 168, 5, 30, 0, 0, 168, 173, 3, 10, 5, 5, 169, 170, 10, 3, 0, 0, 170, 171, 5, 46, 0, 0, 171, 173, 3, 10, 5, 4, 172, 166, 1, 0, 0, 0, 172, 169, 1, 0, 0, 0, 173, 176, 1, 0, 0, 0, 174, 172, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 11, 1, 0, 0, 0, 176, 174, 1, 0, 0, 0, 177, 179, 3, 14, 7, 0, 178, 180, 5, 43, 0, 0, 179, 178, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 182, 5, 42, 0, 0, 182, 183, 3, 82, 41, 0, 183, 192, 1, 0, 0, 0, 184, 186, 3, 14, 7, 0, 185, 187, 5, 43, 0, 0, 186, 185, 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 189, 5, 48, 0, 0, 189, 190, 3, 82, 41, 0, 190, 192, 1, 0, 0, 0, 191, 177, 1, 0, 0, 0, 191, 184, 1, 0, 0, 0, 192, 13, 1, 0, 0, 0, 193, 199, 3, 16, 8, 0, 194, 195, 3, 16, 8, 0, 195, 196, 3, 84, 42, 0, 196, 197, 3, 16, 8, 0, 197, 199, 1, 0, 0, 0, 198, 193, 1, 0, 0, 0, 198, 194, 1, 0, 0, 0, 199, 15, 1, 0, 0, 0, 200, 201, 6, 8, -1, 0, 201, 205, 3, 18, 9, 0, 202, 203, 7, 0, 0, 0, 203, 205, 3, 16, 8, 3, 204, 200, 1, 0, 0, 0, 204, 202, 1, 0, 0, 0, 205, 214, 1, 0, 0, 0, 206, 207, 10, 2, 0, 0, 207, 208, 7, 1, 0, 0, 208, 213, 3, 16, 8, 3, 209, 210, 10, 1, 0, 0, 210, 211, 7, 0, 0, 0, 211, 213, 3, 16, 8, 2, 212, 206, 1, 0, 0, 0, 212, 209, 1, 0, 0, 0, 213, 216, 1, 0, 0, 0, 214, 212, 1, 0, 0, 0, 214, 215, 1, 0, 0, 0, 215, 17, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 217, 225, 3, 48, 24, 0, 218, 225, 3, 40, 20, 0, 219, 225, 3, 20, 10, 0, 220, 221, 5, 39, 0, 0, 221, 222, 3, 10, 5, 0, 222, 223, 5, 49, 0, 0, 223, 225, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 224, 218, 1, 0, 0, 0, 224, 219, 1, 0, 0, 0, 224, 220, 1, 0, 0, 0, 225, 19, 1, 0, 0, 0, 226, 227, 3, 44, 22, 0, 227, 237, 5, 39, 0, 0, 228, 238, 5, 60, 0, 0, 229, 234, 3, 10, 5, 0, 230, 231, 5, 33, 0, 0, 231, 233, 3, 10, 5, 0, 232, 230, 1, 0, 0, 0, 233, 236, 1, 0, 0, 0, 234, 232, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, 237, 228, 1, 0, 0, 0, 237, 229, 1, 0, 0, 0, 237, 238, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 5, 49, 0, 0, 240, 21, 1, 0, 0, 0, 241, 242, 5, 13, 0, 0, 242, 243, 3, 24, 12, 0, 243, 23, 1, 0, 0, 0, 244, 249, 3, 26, 13, 0, 245, 246, 5, 33, 0, 0, 246, 248, 3, 26, 13, 0, 247, 245, 1, 0, 0, 0, 248, 251, 1, 0, 0, 0, 249, 247, 1, 0, 0, 0, 249, 250, 1, 0, 0, 0, 250, 25, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 252, 258, 3, 10, 5, 0, 253, 254, 3, 40, 20, 0, 254, 255, 5, 32, 0, 0, 255, 256, 3, 10, 5, 0, 256, 258, 1, 0, 0, 0, 257, 252, 1, 0, 0, 0, 257, 253, 1, 0, 0, 0, 258, 27, 1, 0, 0, 0, 259, 260, 5, 6, 0, 0, 260, 265, 3, 38, 19, 0, 261, 262, 5, 33, 0, 0, 262, 264, 3, 38, 19, 0, 263, 261, 1, 0, 0, 0, 264, 267, 1, 0, 0, 0, 265, 263, 1, 0, 0, 0, 265, 266, 1, 0, 0, 0, 266, 269, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 268, 270, 3, 30, 15, 0, 269, 268, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 29, 1, 0, 0, 0, 271, 272, 5, 63, 0, 0, 272, 273, 5, 70, 0, 0, 273, 278, 3, 38, 19, 0, 274, 275, 5, 33, 0, 0, 275, 277, 3, 38, 19, 0, 276, 274, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 281, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 281, 282, 5, 64, 0, 0, 282, 31, 1, 0, 0, 0, 283, 284, 5, 4, 0, 0, 284, 285, 3, 24, 12, 0, 285, 33, 1, 0, 0, 0, 286, 288, 5, 16, 0, 0, 287, 289, 3, 24, 12, 0, 288, 287, 1, 0, 0, 0, 288, 289, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 291, 5, 29, 0, 0, 291, 293, 3, 24, 12, 0, 292, 290, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 35, 1, 0, 0, 0, 294, 295, 5, 8, 0, 0, 295, 298, 3, 24, 12, 0, 296, 297, 5, 29, 0, 0, 297, 299, 3, 24, 12, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 37, 1, 0, 0, 0, 300, 
301, 7, 2, 0, 0, 301, 39, 1, 0, 0, 0, 302, 307, 3, 44, 22, 0, 303, 304, 5, 35, 0, 0, 304, 306, 3, 44, 22, 0, 305, 303, 1, 0, 0, 0, 306, 309, 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 307, 308, 1, 0, 0, 0, 308, 41, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 310, 315, 3, 46, 23, 0, 311, 312, 5, 35, 0, 0, 312, 314, 3, 46, 23, 0, 313, 311, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 43, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 319, 7, 3, 0, 0, 319, 45, 1, 0, 0, 0, 320, 321, 7, 4, 0, 0, 321, 47, 1, 0, 0, 0, 322, 365, 5, 44, 0, 0, 323, 324, 3, 80, 40, 0, 324, 325, 5, 65, 0, 0, 325, 365, 1, 0, 0, 0, 326, 365, 3, 78, 39, 0, 327, 365, 3, 80, 40, 0, 328, 365, 3, 74, 37, 0, 329, 365, 5, 47, 0, 0, 330, 365, 3, 82, 41, 0, 331, 332, 5, 63, 0, 0, 332, 337, 3, 76, 38, 0, 333, 334, 5, 33, 0, 0, 334, 336, 3, 76, 38, 0, 335, 333, 1, 0, 0, 0, 336, 339, 1, 0, 0, 0, 337, 335, 1, 0, 0, 0, 337, 338, 1, 0, 0, 0, 338, 340, 1, 0, 0, 0, 339, 337, 1, 0, 0, 0, 340, 341, 5, 64, 0, 0, 341, 365, 1, 0, 0, 0, 342, 343, 5, 63, 0, 0, 343, 348, 3, 74, 37, 0, 344, 345, 5, 33, 0, 0, 345, 347, 3, 74, 37, 0, 346, 344, 1, 0, 0, 0, 347, 350, 1, 0, 0, 0, 348, 346, 1, 0, 0, 0, 348, 349, 1, 0, 0, 0, 349, 351, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351, 352, 5, 64, 0, 0, 352, 365, 1, 0, 0, 0, 353, 354, 5, 63, 0, 0, 354, 359, 3, 82, 41, 0, 355, 356, 5, 33, 0, 0, 356, 358, 3, 82, 41, 0, 357, 355, 1, 0, 0, 0, 358, 361, 1, 0, 0, 0, 359, 357, 1, 0, 0, 0, 359, 360, 1, 0, 0, 0, 360, 362, 1, 0, 0, 0, 361, 359, 1, 0, 0, 0, 362, 363, 5, 64, 0, 0, 363, 365, 1, 0, 0, 0, 364, 322, 1, 0, 0, 0, 364, 323, 1, 0, 0, 0, 364, 326, 1, 0, 0, 0, 364, 327, 1, 0, 0, 0, 364, 328, 1, 0, 0, 0, 364, 329, 1, 0, 0, 0, 364, 330, 1, 0, 0, 0, 364, 331, 1, 0, 0, 0, 364, 342, 1, 0, 0, 0, 364, 353, 1, 0, 0, 0, 365, 49, 1, 0, 0, 0, 366, 367, 5, 10, 0, 0, 367, 368, 5, 27, 0, 0, 368, 51, 1, 0, 0, 0, 369, 370, 5, 15, 0, 0, 370, 375, 3, 54, 27, 0, 371, 372, 5, 33, 0, 0, 372, 374, 3, 54, 27, 0, 373, 371, 1, 0, 0, 0, 374, 377, 1, 0, 0, 0, 375, 373, 1, 0, 0, 0, 375, 376, 1, 0, 0, 0, 376, 53, 1, 0, 0, 0, 377, 375, 1, 0, 0, 0, 378, 380, 3, 10, 5, 0, 379, 381, 7, 5, 0, 0, 380, 379, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 383, 5, 45, 0, 0, 383, 385, 7, 6, 0, 0, 384, 382, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 55, 1, 0, 0, 0, 386, 387, 5, 9, 0, 0, 387, 392, 3, 42, 21, 0, 388, 389, 5, 33, 0, 0, 389, 391, 3, 42, 21, 0, 390, 388, 1, 0, 0, 0, 391, 394, 1, 0, 0, 0, 392, 390, 1, 0, 0, 0, 392, 393, 1, 0, 0, 0, 393, 57, 1, 0, 0, 0, 394, 392, 1, 0, 0, 0, 395, 396, 5, 2, 0, 0, 396, 401, 3, 42, 21, 0, 397, 398, 5, 33, 0, 0, 398, 400, 3, 42, 21, 0, 399, 397, 1, 0, 0, 0, 400, 403, 1, 0, 0, 0, 401, 399, 1, 0, 0, 0, 401, 402, 1, 0, 0, 0, 402, 59, 1, 0, 0, 0, 403, 401, 1, 0, 0, 0, 404, 405, 5, 12, 0, 0, 405, 410, 3, 62, 31, 0, 406, 407, 5, 33, 0, 0, 407, 409, 3, 62, 31, 0, 408, 406, 1, 0, 0, 0, 409, 412, 1, 0, 0, 0, 410, 408, 1, 0, 0, 0, 410, 411, 1, 0, 0, 0, 411, 61, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 413, 414, 3, 42, 21, 0, 414, 415, 5, 79, 0, 0, 415, 416, 3, 42, 21, 0, 416, 63, 1, 0, 0, 0, 417, 418, 5, 1, 0, 0, 418, 419, 3, 18, 9, 0, 419, 421, 3, 82, 41, 0, 420, 422, 3, 70, 35, 0, 421, 420, 1, 0, 0, 0, 421, 422, 1, 0, 0, 0, 422, 65, 1, 0, 0, 0, 423, 424, 5, 7, 0, 0, 424, 425, 3, 18, 9, 0, 425, 426, 3, 82, 41, 0, 426, 67, 1, 0, 0, 0, 427, 428, 5, 11, 0, 0, 428, 429, 3, 40, 20, 0, 429, 69, 1, 0, 0, 0, 430, 435, 3, 72, 36, 0, 431, 432, 5, 33, 0, 0, 432, 434, 3, 72, 36, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 71, 
1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 3, 44, 22, 0, 439, 440, 5, 32, 0, 0, 440, 441, 3, 48, 24, 0, 441, 73, 1, 0, 0, 0, 442, 443, 7, 7, 0, 0, 443, 75, 1, 0, 0, 0, 444, 447, 3, 78, 39, 0, 445, 447, 3, 80, 40, 0, 446, 444, 1, 0, 0, 0, 446, 445, 1, 0, 0, 0, 447, 77, 1, 0, 0, 0, 448, 450, 7, 0, 0, 0, 449, 448, 1, 0, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 1, 0, 0, 0, 451, 452, 5, 28, 0, 0, 452, 79, 1, 0, 0, 0, 453, 455, 7, 0, 0, 0, 454, 453, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, 455, 456, 1, 0, 0, 0, 456, 457, 5, 27, 0, 0, 457, 81, 1, 0, 0, 0, 458, 459, 5, 26, 0, 0, 459, 83, 1, 0, 0, 0, 460, 461, 7, 8, 0, 0, 461, 85, 1, 0, 0, 0, 462, 463, 5, 5, 0, 0, 463, 464, 3, 88, 44, 0, 464, 87, 1, 0, 0, 0, 465, 466, 5, 63, 0, 0, 466, 467, 3, 2, 1, 0, 467, 468, 5, 64, 0, 0, 468, 89, 1, 0, 0, 0, 469, 470, 5, 14, 0, 0, 470, 474, 5, 95, 0, 0, 471, 472, 5, 14, 0, 0, 472, 474, 5, 96, 0, 0, 473, 469, 1, 0, 0, 0, 473, 471, 1, 0, 0, 0, 474, 91, 1, 0, 0, 0, 475, 479, 5, 3, 0, 0, 476, 478, 3, 96, 48, 0, 477, 476, 1, 0, 0, 0, 478, 481, 1, 0, 0, 0, 479, 477, 1, 0, 0, 0, 479, 480, 1, 0, 0, 0, 480, 482, 1, 0, 0, 0, 481, 479, 1, 0, 0, 0, 482, 485, 5, 85, 0, 0, 483, 484, 5, 83, 0, 0, 484, 486, 3, 42, 21, 0, 485, 483, 1, 0, 0, 0, 485, 486, 1, 0, 0, 0, 486, 496, 1, 0, 0, 0, 487, 488, 5, 84, 0, 0, 488, 493, 3, 94, 47, 0, 489, 490, 5, 33, 0, 0, 490, 492, 3, 94, 47, 0, 491, 489, 1, 0, 0, 0, 492, 495, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 493, 494, 1, 0, 0, 0, 494, 497, 1, 0, 0, 0, 495, 493, 1, 0, 0, 0, 496, 487, 1, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 93, 1, 0, 0, 0, 498, 499, 3, 42, 21, 0, 499, 500, 5, 32, 0, 0, 500, 502, 1, 0, 0, 0, 501, 498, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 1, 0, 0, 0, 503, 504, 3, 42, 21, 0, 504, 95, 1, 0, 0, 0, 505, 506, 5, 63, 0, 0, 506, 507, 5, 101, 0, 0, 507, 508, 5, 100, 0, 0, 508, 509, 5, 101, 0, 0, 509, 510, 5, 64, 0, 0, 510, 97, 1, 0, 0, 0, 50, 109, 116, 131, 143, 152, 160, 164, 172, 174, 179, 186, 191, 198, 204, 212, 214, 224, 234, 237, 249, 257, 265, 269, 278, 288, 292, 298, 307, 315, 337, 348, 359, 364, 375, 380, 384, 392, 401, 410, 421, 435, 446, 449, 454, 473, 479, 485, 493, 496, 501] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 846a28cccc817..6fd3d0008474a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -18,25 +18,25 @@ public class EsqlBaseParser extends Parser { new PredictionContextCache(); public static final int DISSECT=1, DROP=2, ENRICH=3, EVAL=4, EXPLAIN=5, FROM=6, GROK=7, INLINESTATS=8, - KEEP=9, LIMIT=10, MV_EXPAND=11, PROJECT=12, RENAME=13, ROW=14, SHOW=15, - SORT=16, STATS=17, WHERE=18, UNKNOWN_CMD=19, LINE_COMMENT=20, MULTILINE_COMMENT=21, - WS=22, EXPLAIN_WS=23, EXPLAIN_LINE_COMMENT=24, EXPLAIN_MULTILINE_COMMENT=25, - PIPE=26, STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, AND=31, - ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, LAST=39, - LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, - RLIKE=49, RP=50, TRUE=51, EQ=52, CIEQ=53, NEQ=54, LT=55, LTE=56, GT=57, - GTE=58, PLUS=59, MINUS=60, ASTERISK=61, SLASH=62, PERCENT=63, OPENING_BRACKET=64, - CLOSING_BRACKET=65, UNQUOTED_IDENTIFIER=66, QUOTED_IDENTIFIER=67, EXPR_LINE_COMMENT=68, - EXPR_MULTILINE_COMMENT=69, EXPR_WS=70, METADATA=71, 
FROM_UNQUOTED_IDENTIFIER=72, - FROM_LINE_COMMENT=73, FROM_MULTILINE_COMMENT=74, FROM_WS=75, UNQUOTED_ID_PATTERN=76, - PROJECT_LINE_COMMENT=77, PROJECT_MULTILINE_COMMENT=78, PROJECT_WS=79, - AS=80, RENAME_LINE_COMMENT=81, RENAME_MULTILINE_COMMENT=82, RENAME_WS=83, - ON=84, WITH=85, ENRICH_POLICY_NAME=86, ENRICH_LINE_COMMENT=87, ENRICH_MULTILINE_COMMENT=88, - ENRICH_WS=89, ENRICH_FIELD_LINE_COMMENT=90, ENRICH_FIELD_MULTILINE_COMMENT=91, - ENRICH_FIELD_WS=92, MVEXPAND_LINE_COMMENT=93, MVEXPAND_MULTILINE_COMMENT=94, - MVEXPAND_WS=95, INFO=96, FUNCTIONS=97, SHOW_LINE_COMMENT=98, SHOW_MULTILINE_COMMENT=99, - SHOW_WS=100, COLON=101, SETTING=102, SETTING_LINE_COMMENT=103, SETTTING_MULTILINE_COMMENT=104, - SETTING_WS=105; + KEEP=9, LIMIT=10, MV_EXPAND=11, RENAME=12, ROW=13, SHOW=14, SORT=15, STATS=16, + WHERE=17, UNKNOWN_CMD=18, LINE_COMMENT=19, MULTILINE_COMMENT=20, WS=21, + EXPLAIN_WS=22, EXPLAIN_LINE_COMMENT=23, EXPLAIN_MULTILINE_COMMENT=24, + PIPE=25, STRING=26, INTEGER_LITERAL=27, DECIMAL_LITERAL=28, BY=29, AND=30, + ASC=31, ASSIGN=32, COMMA=33, DESC=34, DOT=35, FALSE=36, FIRST=37, LAST=38, + LP=39, IN=40, IS=41, LIKE=42, NOT=43, NULL=44, NULLS=45, OR=46, PARAM=47, + RLIKE=48, RP=49, TRUE=50, EQ=51, CIEQ=52, NEQ=53, LT=54, LTE=55, GT=56, + GTE=57, PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, OPENING_BRACKET=63, + CLOSING_BRACKET=64, UNQUOTED_IDENTIFIER=65, QUOTED_IDENTIFIER=66, EXPR_LINE_COMMENT=67, + EXPR_MULTILINE_COMMENT=68, EXPR_WS=69, METADATA=70, FROM_UNQUOTED_IDENTIFIER=71, + FROM_LINE_COMMENT=72, FROM_MULTILINE_COMMENT=73, FROM_WS=74, UNQUOTED_ID_PATTERN=75, + PROJECT_LINE_COMMENT=76, PROJECT_MULTILINE_COMMENT=77, PROJECT_WS=78, + AS=79, RENAME_LINE_COMMENT=80, RENAME_MULTILINE_COMMENT=81, RENAME_WS=82, + ON=83, WITH=84, ENRICH_POLICY_NAME=85, ENRICH_LINE_COMMENT=86, ENRICH_MULTILINE_COMMENT=87, + ENRICH_WS=88, ENRICH_FIELD_LINE_COMMENT=89, ENRICH_FIELD_MULTILINE_COMMENT=90, + ENRICH_FIELD_WS=91, MVEXPAND_LINE_COMMENT=92, MVEXPAND_MULTILINE_COMMENT=93, + MVEXPAND_WS=94, INFO=95, FUNCTIONS=96, SHOW_LINE_COMMENT=97, SHOW_MULTILINE_COMMENT=98, + SHOW_WS=99, COLON=100, SETTING=101, SETTING_LINE_COMMENT=102, SETTTING_MULTILINE_COMMENT=103, + SETTING_WS=104; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, @@ -74,25 +74,25 @@ private static String[] makeRuleNames() { private static String[] makeLiteralNames() { return new String[] { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", - "'grok'", "'inlinestats'", "'keep'", "'limit'", "'mv_expand'", "'project'", - "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, - null, null, null, null, null, null, "'|'", null, null, null, "'by'", - "'and'", "'asc'", "'='", "','", "'desc'", "'.'", "'false'", "'first'", - "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", - "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", - "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", - null, null, null, null, null, "'metadata'", null, null, null, null, null, - null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, - null, null, null, null, null, null, null, null, "'info'", "'functions'", - null, null, null, "':'" + "'grok'", "'inlinestats'", "'keep'", "'limit'", "'mv_expand'", "'rename'", + "'row'", "'show'", "'sort'", "'stats'", "'where'", null, null, null, + null, null, null, null, 
"'|'", null, null, null, "'by'", "'and'", "'asc'", + "'='", "','", "'desc'", "'.'", "'false'", "'first'", "'last'", "'('", + "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", "'or'", "'?'", + "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", + "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", null, null, null, + null, null, "'metadata'", null, null, null, null, null, null, null, null, + "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, + null, null, null, null, null, "'info'", "'functions'", null, null, null, + "':'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); private static String[] makeSymbolicNames() { return new String[] { null, "DISSECT", "DROP", "ENRICH", "EVAL", "EXPLAIN", "FROM", "GROK", - "INLINESTATS", "KEEP", "LIMIT", "MV_EXPAND", "PROJECT", "RENAME", "ROW", - "SHOW", "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", + "INLINESTATS", "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", + "SORT", "STATS", "WHERE", "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", @@ -508,7 +508,6 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce } break; case KEEP: - case PROJECT: enterOuterAlt(_localctx, 4); { setState(121); @@ -1345,7 +1344,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE setState(207); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & -2305843009213693952L) != 0) ) { + if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & 8070450532247928832L) != 0) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -3164,7 +3163,6 @@ public QualifiedNamePatternContext qualifiedNamePattern(int i) { public TerminalNode COMMA(int i) { return getToken(EsqlBaseParser.COMMA, i); } - public TerminalNode PROJECT() { return getToken(EsqlBaseParser.PROJECT, 0); } @SuppressWarnings("this-escape") public KeepCommandContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -3190,65 +3188,30 @@ public final KeepCommandContext keepCommand() throws RecognitionException { enterRule(_localctx, 56, RULE_keepCommand); try { int _alt; - setState(404); + enterOuterAlt(_localctx, 1); + { + setState(386); + match(KEEP); + setState(387); + qualifiedNamePattern(); + setState(392); _errHandler.sync(this); - switch (_input.LA(1)) { - case KEEP: - enterOuterAlt(_localctx, 1); - { - setState(386); - match(KEEP); - setState(387); - qualifiedNamePattern(); - setState(392); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,36,_ctx); - while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { - if ( _alt==1 ) { - { - { - setState(388); - match(COMMA); - setState(389); - qualifiedNamePattern(); - } - } + _alt = getInterpreter().adaptivePredict(_input,36,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(388); + match(COMMA); + setState(389); + qualifiedNamePattern(); } - setState(394); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,36,_ctx); - } + } } - break; - case PROJECT: - enterOuterAlt(_localctx, 2); - { - setState(395); - 
match(PROJECT); - setState(396); - qualifiedNamePattern(); - setState(401); + setState(394); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); - while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { - if ( _alt==1 ) { - { - { - setState(397); - match(COMMA); - setState(398); - qualifiedNamePattern(); - } - } - } - setState(403); - _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); - } - } - break; - default: - throw new NoViableAltException(this); + _alt = getInterpreter().adaptivePredict(_input,36,_ctx); + } } } catch (RecognitionException re) { @@ -3302,27 +3265,27 @@ public final DropCommandContext dropCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(406); + setState(395); match(DROP); - setState(407); + setState(396); qualifiedNamePattern(); - setState(412); + setState(401); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,39,_ctx); + _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(408); + setState(397); match(COMMA); - setState(409); + setState(398); qualifiedNamePattern(); } } } - setState(414); + setState(403); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,39,_ctx); + _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } } } @@ -3377,27 +3340,27 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(415); + setState(404); match(RENAME); - setState(416); + setState(405); renameClause(); - setState(421); + setState(410); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,38,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(417); + setState(406); match(COMMA); - setState(418); + setState(407); renameClause(); } } } - setState(423); + setState(412); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,38,_ctx); } } } @@ -3449,11 +3412,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(424); + setState(413); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(425); + setState(414); match(AS); - setState(426); + setState(415); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -3506,18 +3469,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(428); + setState(417); match(DISSECT); - setState(429); + setState(418); primaryExpression(); - setState(430); + setState(419); string(); - setState(432); + setState(421); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { case 1: { - setState(431); + setState(420); commandOptions(); } break; @@ -3570,11 +3533,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(434); + setState(423); match(GROK); - setState(435); + setState(424); primaryExpression(); - setState(436); + setState(425); string(); } } @@ -3621,9 +3584,9 @@ public final MvExpandCommandContext 
mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(438); + setState(427); match(MV_EXPAND); - setState(439); + setState(428); qualifiedName(); } } @@ -3677,25 +3640,25 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(441); + setState(430); commandOption(); - setState(446); + setState(435); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,42,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(442); + setState(431); match(COMMA); - setState(443); + setState(432); commandOption(); } } } - setState(448); + setState(437); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,42,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); } } } @@ -3745,11 +3708,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(449); + setState(438); identifier(); - setState(450); + setState(439); match(ASSIGN); - setState(451); + setState(440); constant(); } } @@ -3795,7 +3758,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(453); + setState(442); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -3850,20 +3813,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 76, RULE_numericValue); try { - setState(457); + setState(446); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(455); + setState(444); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(456); + setState(445); integerValue(); } break; @@ -3912,12 +3875,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(460); + setState(449); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(459); + setState(448); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -3930,7 +3893,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(462); + setState(451); match(DECIMAL_LITERAL); } } @@ -3977,12 +3940,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(465); + setState(454); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(464); + setState(453); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -3995,7 +3958,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(467); + setState(456); match(INTEGER_LITERAL); } } @@ -4039,7 +4002,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(469); + setState(458); match(STRING); } } @@ -4090,9 +4053,9 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(471); + setState(460); _la = _input.LA(1); 
- if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & 571957152676052992L) != 0) ) { + if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & 285978576338026496L) != 0) ) { _errHandler.recoverInline(this); } else { @@ -4145,9 +4108,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(473); + setState(462); match(EXPLAIN); - setState(474); + setState(463); subqueryExpression(); } } @@ -4195,11 +4158,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(476); + setState(465); match(OPENING_BRACKET); - setState(477); + setState(466); query(0); - setState(478); + setState(467); match(CLOSING_BRACKET); } } @@ -4273,16 +4236,16 @@ public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); enterRule(_localctx, 90, RULE_showCommand); try { - setState(484); + setState(473); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { case 1: _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(480); + setState(469); match(SHOW); - setState(481); + setState(470); match(INFO); } break; @@ -4290,9 +4253,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowFunctionsContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(482); + setState(471); match(SHOW); - setState(483); + setState(472); match(FUNCTIONS); } break; @@ -4364,62 +4327,62 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(486); + setState(475); match(ENRICH); - setState(490); + setState(479); _errHandler.sync(this); _la = _input.LA(1); while (_la==OPENING_BRACKET) { { { - setState(487); + setState(476); setting(); } } - setState(492); + setState(481); _errHandler.sync(this); _la = _input.LA(1); } - setState(493); + setState(482); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(496); + setState(485); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(494); + setState(483); match(ON); - setState(495); + setState(484); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(507); + setState(496); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(498); + setState(487); match(WITH); - setState(499); + setState(488); enrichWithClause(); - setState(504); + setState(493); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,49,_ctx); + _alt = getInterpreter().adaptivePredict(_input,47,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(500); + setState(489); match(COMMA); - setState(501); + setState(490); enrichWithClause(); } } } - setState(506); + setState(495); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,49,_ctx); + _alt = getInterpreter().adaptivePredict(_input,47,_ctx); } } break; @@ -4474,19 +4437,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - 
setState(512); + setState(501); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { case 1: { - setState(509); + setState(498); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(510); + setState(499); match(ASSIGN); } break; } - setState(514); + setState(503); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -4538,15 +4501,15 @@ public final SettingContext setting() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(516); + setState(505); match(OPENING_BRACKET); - setState(517); + setState(506); ((SettingContext)_localctx).name = match(SETTING); - setState(518); + setState(507); match(COLON); - setState(519); + setState(508); ((SettingContext)_localctx).value = match(SETTING); - setState(520); + setState(509); match(CLOSING_BRACKET); } } @@ -4599,7 +4562,7 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, } public static final String _serializedATN = - "\u0004\u0001i\u020b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001h\u0200\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -4658,283 +4621,275 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, "\b\u001a\n\u001a\f\u001a\u0179\t\u001a\u0001\u001b\u0001\u001b\u0003\u001b"+ "\u017d\b\u001b\u0001\u001b\u0001\u001b\u0003\u001b\u0181\b\u001b\u0001"+ "\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0005\u001c\u0187\b\u001c\n"+ - "\u001c\f\u001c\u018a\t\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001"+ - "\u001c\u0005\u001c\u0190\b\u001c\n\u001c\f\u001c\u0193\t\u001c\u0003\u001c"+ - "\u0195\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0005\u001d"+ - "\u019b\b\u001d\n\u001d\f\u001d\u019e\t\u001d\u0001\u001e\u0001\u001e\u0001"+ - "\u001e\u0001\u001e\u0005\u001e\u01a4\b\u001e\n\u001e\f\u001e\u01a7\t\u001e"+ - "\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001 "+ - "\u0001 \u0003 \u01b1\b \u0001!\u0001!\u0001!\u0001!\u0001\"\u0001\"\u0001"+ - "\"\u0001#\u0001#\u0001#\u0005#\u01bd\b#\n#\f#\u01c0\t#\u0001$\u0001$\u0001"+ - "$\u0001$\u0001%\u0001%\u0001&\u0001&\u0003&\u01ca\b&\u0001\'\u0003\'\u01cd"+ - "\b\'\u0001\'\u0001\'\u0001(\u0003(\u01d2\b(\u0001(\u0001(\u0001)\u0001"+ - ")\u0001*\u0001*\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0001"+ - "-\u0001-\u0001-\u0001-\u0003-\u01e5\b-\u0001.\u0001.\u0005.\u01e9\b.\n"+ - ".\f.\u01ec\t.\u0001.\u0001.\u0001.\u0003.\u01f1\b.\u0001.\u0001.\u0001"+ - ".\u0001.\u0005.\u01f7\b.\n.\f.\u01fa\t.\u0003.\u01fc\b.\u0001/\u0001/"+ - "\u0001/\u0003/\u0201\b/\u0001/\u0001/\u00010\u00010\u00010\u00010\u0001"+ - "0\u00010\u00010\u0000\u0003\u0002\n\u00101\u0000\u0002\u0004\u0006\b\n"+ - "\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.0246"+ - "8:<>@BDFHJLNPRTVXZ\\^`\u0000\t\u0001\u0000;<\u0001\u0000=?\u0002\u0000"+ - "CCHH\u0001\u0000BC\u0002\u0000CCLL\u0002\u0000 ##\u0001\u0000&\'\u0002"+ - "\u0000%%33\u0001\u00004:\u0228\u0000b\u0001\u0000\u0000\u0000\u0002e\u0001"+ - "\u0000\u0000\u0000\u0004t\u0001\u0000\u0000\u0000\u0006\u0083\u0001\u0000"+ - "\u0000\u0000\b\u0085\u0001\u0000\u0000\u0000\n\u00a4\u0001\u0000\u0000"+ - 
"\u0000\f\u00bf\u0001\u0000\u0000\u0000\u000e\u00c6\u0001\u0000\u0000\u0000"+ - "\u0010\u00cc\u0001\u0000\u0000\u0000\u0012\u00e0\u0001\u0000\u0000\u0000"+ - "\u0014\u00e2\u0001\u0000\u0000\u0000\u0016\u00f1\u0001\u0000\u0000\u0000"+ - "\u0018\u00f4\u0001\u0000\u0000\u0000\u001a\u0101\u0001\u0000\u0000\u0000"+ - "\u001c\u0103\u0001\u0000\u0000\u0000\u001e\u010f\u0001\u0000\u0000\u0000"+ - " \u011b\u0001\u0000\u0000\u0000\"\u011e\u0001\u0000\u0000\u0000$\u0126"+ - "\u0001\u0000\u0000\u0000&\u012c\u0001\u0000\u0000\u0000(\u012e\u0001\u0000"+ - "\u0000\u0000*\u0136\u0001\u0000\u0000\u0000,\u013e\u0001\u0000\u0000\u0000"+ - ".\u0140\u0001\u0000\u0000\u00000\u016c\u0001\u0000\u0000\u00002\u016e"+ - "\u0001\u0000\u0000\u00004\u0171\u0001\u0000\u0000\u00006\u017a\u0001\u0000"+ - "\u0000\u00008\u0194\u0001\u0000\u0000\u0000:\u0196\u0001\u0000\u0000\u0000"+ - "<\u019f\u0001\u0000\u0000\u0000>\u01a8\u0001\u0000\u0000\u0000@\u01ac"+ - "\u0001\u0000\u0000\u0000B\u01b2\u0001\u0000\u0000\u0000D\u01b6\u0001\u0000"+ - "\u0000\u0000F\u01b9\u0001\u0000\u0000\u0000H\u01c1\u0001\u0000\u0000\u0000"+ - "J\u01c5\u0001\u0000\u0000\u0000L\u01c9\u0001\u0000\u0000\u0000N\u01cc"+ - "\u0001\u0000\u0000\u0000P\u01d1\u0001\u0000\u0000\u0000R\u01d5\u0001\u0000"+ - "\u0000\u0000T\u01d7\u0001\u0000\u0000\u0000V\u01d9\u0001\u0000\u0000\u0000"+ - "X\u01dc\u0001\u0000\u0000\u0000Z\u01e4\u0001\u0000\u0000\u0000\\\u01e6"+ - "\u0001\u0000\u0000\u0000^\u0200\u0001\u0000\u0000\u0000`\u0204\u0001\u0000"+ - "\u0000\u0000bc\u0003\u0002\u0001\u0000cd\u0005\u0000\u0000\u0001d\u0001"+ - "\u0001\u0000\u0000\u0000ef\u0006\u0001\uffff\uffff\u0000fg\u0003\u0004"+ - "\u0002\u0000gm\u0001\u0000\u0000\u0000hi\n\u0001\u0000\u0000ij\u0005\u001a"+ - "\u0000\u0000jl\u0003\u0006\u0003\u0000kh\u0001\u0000\u0000\u0000lo\u0001"+ - "\u0000\u0000\u0000mk\u0001\u0000\u0000\u0000mn\u0001\u0000\u0000\u0000"+ - "n\u0003\u0001\u0000\u0000\u0000om\u0001\u0000\u0000\u0000pu\u0003V+\u0000"+ - "qu\u0003\u001c\u000e\u0000ru\u0003\u0016\u000b\u0000su\u0003Z-\u0000t"+ - "p\u0001\u0000\u0000\u0000tq\u0001\u0000\u0000\u0000tr\u0001\u0000\u0000"+ - "\u0000ts\u0001\u0000\u0000\u0000u\u0005\u0001\u0000\u0000\u0000v\u0084"+ - "\u0003 \u0010\u0000w\u0084\u0003$\u0012\u0000x\u0084\u00032\u0019\u0000"+ - "y\u0084\u00038\u001c\u0000z\u0084\u00034\u001a\u0000{\u0084\u0003\"\u0011"+ - "\u0000|\u0084\u0003\b\u0004\u0000}\u0084\u0003:\u001d\u0000~\u0084\u0003"+ - "<\u001e\u0000\u007f\u0084\u0003@ \u0000\u0080\u0084\u0003B!\u0000\u0081"+ - "\u0084\u0003\\.\u0000\u0082\u0084\u0003D\"\u0000\u0083v\u0001\u0000\u0000"+ - "\u0000\u0083w\u0001\u0000\u0000\u0000\u0083x\u0001\u0000\u0000\u0000\u0083"+ - "y\u0001\u0000\u0000\u0000\u0083z\u0001\u0000\u0000\u0000\u0083{\u0001"+ - "\u0000\u0000\u0000\u0083|\u0001\u0000\u0000\u0000\u0083}\u0001\u0000\u0000"+ - "\u0000\u0083~\u0001\u0000\u0000\u0000\u0083\u007f\u0001\u0000\u0000\u0000"+ - "\u0083\u0080\u0001\u0000\u0000\u0000\u0083\u0081\u0001\u0000\u0000\u0000"+ - "\u0083\u0082\u0001\u0000\u0000\u0000\u0084\u0007\u0001\u0000\u0000\u0000"+ - "\u0085\u0086\u0005\u0012\u0000\u0000\u0086\u0087\u0003\n\u0005\u0000\u0087"+ - "\t\u0001\u0000\u0000\u0000\u0088\u0089\u0006\u0005\uffff\uffff\u0000\u0089"+ - "\u008a\u0005,\u0000\u0000\u008a\u00a5\u0003\n\u0005\u0007\u008b\u00a5"+ - "\u0003\u000e\u0007\u0000\u008c\u00a5\u0003\f\u0006\u0000\u008d\u008f\u0003"+ - "\u000e\u0007\u0000\u008e\u0090\u0005,\u0000\u0000\u008f\u008e\u0001\u0000"+ - "\u0000\u0000\u008f\u0090\u0001\u0000\u0000\u0000\u0090\u0091\u0001\u0000"+ - 
"\u0000\u0000\u0091\u0092\u0005)\u0000\u0000\u0092\u0093\u0005(\u0000\u0000"+ - "\u0093\u0098\u0003\u000e\u0007\u0000\u0094\u0095\u0005\"\u0000\u0000\u0095"+ - "\u0097\u0003\u000e\u0007\u0000\u0096\u0094\u0001\u0000\u0000\u0000\u0097"+ - "\u009a\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000\u0000\u0000\u0098"+ - "\u0099\u0001\u0000\u0000\u0000\u0099\u009b\u0001\u0000\u0000\u0000\u009a"+ - "\u0098\u0001\u0000\u0000\u0000\u009b\u009c\u00052\u0000\u0000\u009c\u00a5"+ - "\u0001\u0000\u0000\u0000\u009d\u009e\u0003\u000e\u0007\u0000\u009e\u00a0"+ - "\u0005*\u0000\u0000\u009f\u00a1\u0005,\u0000\u0000\u00a0\u009f\u0001\u0000"+ - "\u0000\u0000\u00a0\u00a1\u0001\u0000\u0000\u0000\u00a1\u00a2\u0001\u0000"+ - "\u0000\u0000\u00a2\u00a3\u0005-\u0000\u0000\u00a3\u00a5\u0001\u0000\u0000"+ - "\u0000\u00a4\u0088\u0001\u0000\u0000\u0000\u00a4\u008b\u0001\u0000\u0000"+ - "\u0000\u00a4\u008c\u0001\u0000\u0000\u0000\u00a4\u008d\u0001\u0000\u0000"+ - "\u0000\u00a4\u009d\u0001\u0000\u0000\u0000\u00a5\u00ae\u0001\u0000\u0000"+ - "\u0000\u00a6\u00a7\n\u0004\u0000\u0000\u00a7\u00a8\u0005\u001f\u0000\u0000"+ - "\u00a8\u00ad\u0003\n\u0005\u0005\u00a9\u00aa\n\u0003\u0000\u0000\u00aa"+ - "\u00ab\u0005/\u0000\u0000\u00ab\u00ad\u0003\n\u0005\u0004\u00ac\u00a6"+ - "\u0001\u0000\u0000\u0000\u00ac\u00a9\u0001\u0000\u0000\u0000\u00ad\u00b0"+ - "\u0001\u0000\u0000\u0000\u00ae\u00ac\u0001\u0000\u0000\u0000\u00ae\u00af"+ - "\u0001\u0000\u0000\u0000\u00af\u000b\u0001\u0000\u0000\u0000\u00b0\u00ae"+ - "\u0001\u0000\u0000\u0000\u00b1\u00b3\u0003\u000e\u0007\u0000\u00b2\u00b4"+ - "\u0005,\u0000\u0000\u00b3\u00b2\u0001\u0000\u0000\u0000\u00b3\u00b4\u0001"+ - "\u0000\u0000\u0000\u00b4\u00b5\u0001\u0000\u0000\u0000\u00b5\u00b6\u0005"+ - "+\u0000\u0000\u00b6\u00b7\u0003R)\u0000\u00b7\u00c0\u0001\u0000\u0000"+ - "\u0000\u00b8\u00ba\u0003\u000e\u0007\u0000\u00b9\u00bb\u0005,\u0000\u0000"+ - "\u00ba\u00b9\u0001\u0000\u0000\u0000\u00ba\u00bb\u0001\u0000\u0000\u0000"+ - "\u00bb\u00bc\u0001\u0000\u0000\u0000\u00bc\u00bd\u00051\u0000\u0000\u00bd"+ - "\u00be\u0003R)\u0000\u00be\u00c0\u0001\u0000\u0000\u0000\u00bf\u00b1\u0001"+ - "\u0000\u0000\u0000\u00bf\u00b8\u0001\u0000\u0000\u0000\u00c0\r\u0001\u0000"+ - "\u0000\u0000\u00c1\u00c7\u0003\u0010\b\u0000\u00c2\u00c3\u0003\u0010\b"+ - "\u0000\u00c3\u00c4\u0003T*\u0000\u00c4\u00c5\u0003\u0010\b\u0000\u00c5"+ - "\u00c7\u0001\u0000\u0000\u0000\u00c6\u00c1\u0001\u0000\u0000\u0000\u00c6"+ - "\u00c2\u0001\u0000\u0000\u0000\u00c7\u000f\u0001\u0000\u0000\u0000\u00c8"+ - "\u00c9\u0006\b\uffff\uffff\u0000\u00c9\u00cd\u0003\u0012\t\u0000\u00ca"+ - "\u00cb\u0007\u0000\u0000\u0000\u00cb\u00cd\u0003\u0010\b\u0003\u00cc\u00c8"+ - "\u0001\u0000\u0000\u0000\u00cc\u00ca\u0001\u0000\u0000\u0000\u00cd\u00d6"+ - "\u0001\u0000\u0000\u0000\u00ce\u00cf\n\u0002\u0000\u0000\u00cf\u00d0\u0007"+ - "\u0001\u0000\u0000\u00d0\u00d5\u0003\u0010\b\u0003\u00d1\u00d2\n\u0001"+ - "\u0000\u0000\u00d2\u00d3\u0007\u0000\u0000\u0000\u00d3\u00d5\u0003\u0010"+ - "\b\u0002\u00d4\u00ce\u0001\u0000\u0000\u0000\u00d4\u00d1\u0001\u0000\u0000"+ - "\u0000\u00d5\u00d8\u0001\u0000\u0000\u0000\u00d6\u00d4\u0001\u0000\u0000"+ - "\u0000\u00d6\u00d7\u0001\u0000\u0000\u0000\u00d7\u0011\u0001\u0000\u0000"+ - "\u0000\u00d8\u00d6\u0001\u0000\u0000\u0000\u00d9\u00e1\u00030\u0018\u0000"+ - "\u00da\u00e1\u0003(\u0014\u0000\u00db\u00e1\u0003\u0014\n\u0000\u00dc"+ - "\u00dd\u0005(\u0000\u0000\u00dd\u00de\u0003\n\u0005\u0000\u00de\u00df"+ - "\u00052\u0000\u0000\u00df\u00e1\u0001\u0000\u0000\u0000\u00e0\u00d9\u0001"+ - 
"\u0000\u0000\u0000\u00e0\u00da\u0001\u0000\u0000\u0000\u00e0\u00db\u0001"+ - "\u0000\u0000\u0000\u00e0\u00dc\u0001\u0000\u0000\u0000\u00e1\u0013\u0001"+ - "\u0000\u0000\u0000\u00e2\u00e3\u0003,\u0016\u0000\u00e3\u00ed\u0005(\u0000"+ - "\u0000\u00e4\u00ee\u0005=\u0000\u0000\u00e5\u00ea\u0003\n\u0005\u0000"+ - "\u00e6\u00e7\u0005\"\u0000\u0000\u00e7\u00e9\u0003\n\u0005\u0000\u00e8"+ - "\u00e6\u0001\u0000\u0000\u0000\u00e9\u00ec\u0001\u0000\u0000\u0000\u00ea"+ - "\u00e8\u0001\u0000\u0000\u0000\u00ea\u00eb\u0001\u0000\u0000\u0000\u00eb"+ - "\u00ee\u0001\u0000\u0000\u0000\u00ec\u00ea\u0001\u0000\u0000\u0000\u00ed"+ - "\u00e4\u0001\u0000\u0000\u0000\u00ed\u00e5\u0001\u0000\u0000\u0000\u00ed"+ - "\u00ee\u0001\u0000\u0000\u0000\u00ee\u00ef\u0001\u0000\u0000\u0000\u00ef"+ - "\u00f0\u00052\u0000\u0000\u00f0\u0015\u0001\u0000\u0000\u0000\u00f1\u00f2"+ - "\u0005\u000e\u0000\u0000\u00f2\u00f3\u0003\u0018\f\u0000\u00f3\u0017\u0001"+ - "\u0000\u0000\u0000\u00f4\u00f9\u0003\u001a\r\u0000\u00f5\u00f6\u0005\""+ - "\u0000\u0000\u00f6\u00f8\u0003\u001a\r\u0000\u00f7\u00f5\u0001\u0000\u0000"+ - "\u0000\u00f8\u00fb\u0001\u0000\u0000\u0000\u00f9\u00f7\u0001\u0000\u0000"+ - "\u0000\u00f9\u00fa\u0001\u0000\u0000\u0000\u00fa\u0019\u0001\u0000\u0000"+ - "\u0000\u00fb\u00f9\u0001\u0000\u0000\u0000\u00fc\u0102\u0003\n\u0005\u0000"+ - "\u00fd\u00fe\u0003(\u0014\u0000\u00fe\u00ff\u0005!\u0000\u0000\u00ff\u0100"+ - "\u0003\n\u0005\u0000\u0100\u0102\u0001\u0000\u0000\u0000\u0101\u00fc\u0001"+ - "\u0000\u0000\u0000\u0101\u00fd\u0001\u0000\u0000\u0000\u0102\u001b\u0001"+ - "\u0000\u0000\u0000\u0103\u0104\u0005\u0006\u0000\u0000\u0104\u0109\u0003"+ - "&\u0013\u0000\u0105\u0106\u0005\"\u0000\u0000\u0106\u0108\u0003&\u0013"+ - "\u0000\u0107\u0105\u0001\u0000\u0000\u0000\u0108\u010b\u0001\u0000\u0000"+ - "\u0000\u0109\u0107\u0001\u0000\u0000\u0000\u0109\u010a\u0001\u0000\u0000"+ - "\u0000\u010a\u010d\u0001\u0000\u0000\u0000\u010b\u0109\u0001\u0000\u0000"+ - "\u0000\u010c\u010e\u0003\u001e\u000f\u0000\u010d\u010c\u0001\u0000\u0000"+ - "\u0000\u010d\u010e\u0001\u0000\u0000\u0000\u010e\u001d\u0001\u0000\u0000"+ - "\u0000\u010f\u0110\u0005@\u0000\u0000\u0110\u0111\u0005G\u0000\u0000\u0111"+ - "\u0116\u0003&\u0013\u0000\u0112\u0113\u0005\"\u0000\u0000\u0113\u0115"+ - "\u0003&\u0013\u0000\u0114\u0112\u0001\u0000\u0000\u0000\u0115\u0118\u0001"+ - "\u0000\u0000\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0116\u0117\u0001"+ - "\u0000\u0000\u0000\u0117\u0119\u0001\u0000\u0000\u0000\u0118\u0116\u0001"+ - "\u0000\u0000\u0000\u0119\u011a\u0005A\u0000\u0000\u011a\u001f\u0001\u0000"+ - "\u0000\u0000\u011b\u011c\u0005\u0004\u0000\u0000\u011c\u011d\u0003\u0018"+ - "\f\u0000\u011d!\u0001\u0000\u0000\u0000\u011e\u0120\u0005\u0011\u0000"+ - "\u0000\u011f\u0121\u0003\u0018\f\u0000\u0120\u011f\u0001\u0000\u0000\u0000"+ - "\u0120\u0121\u0001\u0000\u0000\u0000\u0121\u0124\u0001\u0000\u0000\u0000"+ - "\u0122\u0123\u0005\u001e\u0000\u0000\u0123\u0125\u0003\u0018\f\u0000\u0124"+ - "\u0122\u0001\u0000\u0000\u0000\u0124\u0125\u0001\u0000\u0000\u0000\u0125"+ - "#\u0001\u0000\u0000\u0000\u0126\u0127\u0005\b\u0000\u0000\u0127\u012a"+ - "\u0003\u0018\f\u0000\u0128\u0129\u0005\u001e\u0000\u0000\u0129\u012b\u0003"+ - "\u0018\f\u0000\u012a\u0128\u0001\u0000\u0000\u0000\u012a\u012b\u0001\u0000"+ - "\u0000\u0000\u012b%\u0001\u0000\u0000\u0000\u012c\u012d\u0007\u0002\u0000"+ - "\u0000\u012d\'\u0001\u0000\u0000\u0000\u012e\u0133\u0003,\u0016\u0000"+ - "\u012f\u0130\u0005$\u0000\u0000\u0130\u0132\u0003,\u0016\u0000\u0131\u012f"+ - 
"\u0001\u0000\u0000\u0000\u0132\u0135\u0001\u0000\u0000\u0000\u0133\u0131"+ - "\u0001\u0000\u0000\u0000\u0133\u0134\u0001\u0000\u0000\u0000\u0134)\u0001"+ - "\u0000\u0000\u0000\u0135\u0133\u0001\u0000\u0000\u0000\u0136\u013b\u0003"+ - ".\u0017\u0000\u0137\u0138\u0005$\u0000\u0000\u0138\u013a\u0003.\u0017"+ - "\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013d\u0001\u0000\u0000"+ - "\u0000\u013b\u0139\u0001\u0000\u0000\u0000\u013b\u013c\u0001\u0000\u0000"+ - "\u0000\u013c+\u0001\u0000\u0000\u0000\u013d\u013b\u0001\u0000\u0000\u0000"+ - "\u013e\u013f\u0007\u0003\u0000\u0000\u013f-\u0001\u0000\u0000\u0000\u0140"+ - "\u0141\u0007\u0004\u0000\u0000\u0141/\u0001\u0000\u0000\u0000\u0142\u016d"+ - "\u0005-\u0000\u0000\u0143\u0144\u0003P(\u0000\u0144\u0145\u0005B\u0000"+ - "\u0000\u0145\u016d\u0001\u0000\u0000\u0000\u0146\u016d\u0003N\'\u0000"+ - "\u0147\u016d\u0003P(\u0000\u0148\u016d\u0003J%\u0000\u0149\u016d\u0005"+ - "0\u0000\u0000\u014a\u016d\u0003R)\u0000\u014b\u014c\u0005@\u0000\u0000"+ - "\u014c\u0151\u0003L&\u0000\u014d\u014e\u0005\"\u0000\u0000\u014e\u0150"+ - "\u0003L&\u0000\u014f\u014d\u0001\u0000\u0000\u0000\u0150\u0153\u0001\u0000"+ - "\u0000\u0000\u0151\u014f\u0001\u0000\u0000\u0000\u0151\u0152\u0001\u0000"+ - "\u0000\u0000\u0152\u0154\u0001\u0000\u0000\u0000\u0153\u0151\u0001\u0000"+ - "\u0000\u0000\u0154\u0155\u0005A\u0000\u0000\u0155\u016d\u0001\u0000\u0000"+ - "\u0000\u0156\u0157\u0005@\u0000\u0000\u0157\u015c\u0003J%\u0000\u0158"+ - "\u0159\u0005\"\u0000\u0000\u0159\u015b\u0003J%\u0000\u015a\u0158\u0001"+ - "\u0000\u0000\u0000\u015b\u015e\u0001\u0000\u0000\u0000\u015c\u015a\u0001"+ - "\u0000\u0000\u0000\u015c\u015d\u0001\u0000\u0000\u0000\u015d\u015f\u0001"+ - "\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000\u015f\u0160\u0005"+ - "A\u0000\u0000\u0160\u016d\u0001\u0000\u0000\u0000\u0161\u0162\u0005@\u0000"+ - "\u0000\u0162\u0167\u0003R)\u0000\u0163\u0164\u0005\"\u0000\u0000\u0164"+ - "\u0166\u0003R)\u0000\u0165\u0163\u0001\u0000\u0000\u0000\u0166\u0169\u0001"+ - "\u0000\u0000\u0000\u0167\u0165\u0001\u0000\u0000\u0000\u0167\u0168\u0001"+ - "\u0000\u0000\u0000\u0168\u016a\u0001\u0000\u0000\u0000\u0169\u0167\u0001"+ - "\u0000\u0000\u0000\u016a\u016b\u0005A\u0000\u0000\u016b\u016d\u0001\u0000"+ - "\u0000\u0000\u016c\u0142\u0001\u0000\u0000\u0000\u016c\u0143\u0001\u0000"+ - "\u0000\u0000\u016c\u0146\u0001\u0000\u0000\u0000\u016c\u0147\u0001\u0000"+ - "\u0000\u0000\u016c\u0148\u0001\u0000\u0000\u0000\u016c\u0149\u0001\u0000"+ - "\u0000\u0000\u016c\u014a\u0001\u0000\u0000\u0000\u016c\u014b\u0001\u0000"+ - "\u0000\u0000\u016c\u0156\u0001\u0000\u0000\u0000\u016c\u0161\u0001\u0000"+ - "\u0000\u0000\u016d1\u0001\u0000\u0000\u0000\u016e\u016f\u0005\n\u0000"+ - "\u0000\u016f\u0170\u0005\u001c\u0000\u0000\u01703\u0001\u0000\u0000\u0000"+ - "\u0171\u0172\u0005\u0010\u0000\u0000\u0172\u0177\u00036\u001b\u0000\u0173"+ - "\u0174\u0005\"\u0000\u0000\u0174\u0176\u00036\u001b\u0000\u0175\u0173"+ + "\u001c\f\u001c\u018a\t\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001"+ + "\u001d\u0005\u001d\u0190\b\u001d\n\u001d\f\u001d\u0193\t\u001d\u0001\u001e"+ + "\u0001\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u0199\b\u001e\n\u001e"+ + "\f\u001e\u019c\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001\u001f"+ + "\u0001 \u0001 \u0001 \u0001 \u0003 \u01a6\b \u0001!\u0001!\u0001!\u0001"+ + "!\u0001\"\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0005#\u01b2\b#\n#\f#\u01b5"+ + "\t#\u0001$\u0001$\u0001$\u0001$\u0001%\u0001%\u0001&\u0001&\u0003&\u01bf"+ + 
"\b&\u0001\'\u0003\'\u01c2\b\'\u0001\'\u0001\'\u0001(\u0003(\u01c7\b(\u0001"+ + "(\u0001(\u0001)\u0001)\u0001*\u0001*\u0001+\u0001+\u0001+\u0001,\u0001"+ + ",\u0001,\u0001,\u0001-\u0001-\u0001-\u0001-\u0003-\u01da\b-\u0001.\u0001"+ + ".\u0005.\u01de\b.\n.\f.\u01e1\t.\u0001.\u0001.\u0001.\u0003.\u01e6\b."+ + "\u0001.\u0001.\u0001.\u0001.\u0005.\u01ec\b.\n.\f.\u01ef\t.\u0003.\u01f1"+ + "\b.\u0001/\u0001/\u0001/\u0003/\u01f6\b/\u0001/\u0001/\u00010\u00010\u0001"+ + "0\u00010\u00010\u00010\u00010\u0000\u0003\u0002\n\u00101\u0000\u0002\u0004"+ + "\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e \""+ + "$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`\u0000\t\u0001\u0000:;\u0001\u0000<>\u0002"+ + "\u0000BBGG\u0001\u0000AB\u0002\u0000BBKK\u0002\u0000\u001f\u001f\"\"\u0001"+ + "\u0000%&\u0002\u0000$$22\u0001\u000039\u021b\u0000b\u0001\u0000\u0000"+ + "\u0000\u0002e\u0001\u0000\u0000\u0000\u0004t\u0001\u0000\u0000\u0000\u0006"+ + "\u0083\u0001\u0000\u0000\u0000\b\u0085\u0001\u0000\u0000\u0000\n\u00a4"+ + "\u0001\u0000\u0000\u0000\f\u00bf\u0001\u0000\u0000\u0000\u000e\u00c6\u0001"+ + "\u0000\u0000\u0000\u0010\u00cc\u0001\u0000\u0000\u0000\u0012\u00e0\u0001"+ + "\u0000\u0000\u0000\u0014\u00e2\u0001\u0000\u0000\u0000\u0016\u00f1\u0001"+ + "\u0000\u0000\u0000\u0018\u00f4\u0001\u0000\u0000\u0000\u001a\u0101\u0001"+ + "\u0000\u0000\u0000\u001c\u0103\u0001\u0000\u0000\u0000\u001e\u010f\u0001"+ + "\u0000\u0000\u0000 \u011b\u0001\u0000\u0000\u0000\"\u011e\u0001\u0000"+ + "\u0000\u0000$\u0126\u0001\u0000\u0000\u0000&\u012c\u0001\u0000\u0000\u0000"+ + "(\u012e\u0001\u0000\u0000\u0000*\u0136\u0001\u0000\u0000\u0000,\u013e"+ + "\u0001\u0000\u0000\u0000.\u0140\u0001\u0000\u0000\u00000\u016c\u0001\u0000"+ + "\u0000\u00002\u016e\u0001\u0000\u0000\u00004\u0171\u0001\u0000\u0000\u0000"+ + "6\u017a\u0001\u0000\u0000\u00008\u0182\u0001\u0000\u0000\u0000:\u018b"+ + "\u0001\u0000\u0000\u0000<\u0194\u0001\u0000\u0000\u0000>\u019d\u0001\u0000"+ + "\u0000\u0000@\u01a1\u0001\u0000\u0000\u0000B\u01a7\u0001\u0000\u0000\u0000"+ + "D\u01ab\u0001\u0000\u0000\u0000F\u01ae\u0001\u0000\u0000\u0000H\u01b6"+ + "\u0001\u0000\u0000\u0000J\u01ba\u0001\u0000\u0000\u0000L\u01be\u0001\u0000"+ + "\u0000\u0000N\u01c1\u0001\u0000\u0000\u0000P\u01c6\u0001\u0000\u0000\u0000"+ + "R\u01ca\u0001\u0000\u0000\u0000T\u01cc\u0001\u0000\u0000\u0000V\u01ce"+ + "\u0001\u0000\u0000\u0000X\u01d1\u0001\u0000\u0000\u0000Z\u01d9\u0001\u0000"+ + "\u0000\u0000\\\u01db\u0001\u0000\u0000\u0000^\u01f5\u0001\u0000\u0000"+ + "\u0000`\u01f9\u0001\u0000\u0000\u0000bc\u0003\u0002\u0001\u0000cd\u0005"+ + "\u0000\u0000\u0001d\u0001\u0001\u0000\u0000\u0000ef\u0006\u0001\uffff"+ + "\uffff\u0000fg\u0003\u0004\u0002\u0000gm\u0001\u0000\u0000\u0000hi\n\u0001"+ + "\u0000\u0000ij\u0005\u0019\u0000\u0000jl\u0003\u0006\u0003\u0000kh\u0001"+ + "\u0000\u0000\u0000lo\u0001\u0000\u0000\u0000mk\u0001\u0000\u0000\u0000"+ + "mn\u0001\u0000\u0000\u0000n\u0003\u0001\u0000\u0000\u0000om\u0001\u0000"+ + "\u0000\u0000pu\u0003V+\u0000qu\u0003\u001c\u000e\u0000ru\u0003\u0016\u000b"+ + "\u0000su\u0003Z-\u0000tp\u0001\u0000\u0000\u0000tq\u0001\u0000\u0000\u0000"+ + "tr\u0001\u0000\u0000\u0000ts\u0001\u0000\u0000\u0000u\u0005\u0001\u0000"+ + "\u0000\u0000v\u0084\u0003 \u0010\u0000w\u0084\u0003$\u0012\u0000x\u0084"+ + "\u00032\u0019\u0000y\u0084\u00038\u001c\u0000z\u0084\u00034\u001a\u0000"+ + "{\u0084\u0003\"\u0011\u0000|\u0084\u0003\b\u0004\u0000}\u0084\u0003:\u001d"+ + "\u0000~\u0084\u0003<\u001e\u0000\u007f\u0084\u0003@ \u0000\u0080\u0084"+ + 
"\u0003B!\u0000\u0081\u0084\u0003\\.\u0000\u0082\u0084\u0003D\"\u0000\u0083"+ + "v\u0001\u0000\u0000\u0000\u0083w\u0001\u0000\u0000\u0000\u0083x\u0001"+ + "\u0000\u0000\u0000\u0083y\u0001\u0000\u0000\u0000\u0083z\u0001\u0000\u0000"+ + "\u0000\u0083{\u0001\u0000\u0000\u0000\u0083|\u0001\u0000\u0000\u0000\u0083"+ + "}\u0001\u0000\u0000\u0000\u0083~\u0001\u0000\u0000\u0000\u0083\u007f\u0001"+ + "\u0000\u0000\u0000\u0083\u0080\u0001\u0000\u0000\u0000\u0083\u0081\u0001"+ + "\u0000\u0000\u0000\u0083\u0082\u0001\u0000\u0000\u0000\u0084\u0007\u0001"+ + "\u0000\u0000\u0000\u0085\u0086\u0005\u0011\u0000\u0000\u0086\u0087\u0003"+ + "\n\u0005\u0000\u0087\t\u0001\u0000\u0000\u0000\u0088\u0089\u0006\u0005"+ + "\uffff\uffff\u0000\u0089\u008a\u0005+\u0000\u0000\u008a\u00a5\u0003\n"+ + "\u0005\u0007\u008b\u00a5\u0003\u000e\u0007\u0000\u008c\u00a5\u0003\f\u0006"+ + "\u0000\u008d\u008f\u0003\u000e\u0007\u0000\u008e\u0090\u0005+\u0000\u0000"+ + "\u008f\u008e\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000\u0000"+ + "\u0090\u0091\u0001\u0000\u0000\u0000\u0091\u0092\u0005(\u0000\u0000\u0092"+ + "\u0093\u0005\'\u0000\u0000\u0093\u0098\u0003\u000e\u0007\u0000\u0094\u0095"+ + "\u0005!\u0000\u0000\u0095\u0097\u0003\u000e\u0007\u0000\u0096\u0094\u0001"+ + "\u0000\u0000\u0000\u0097\u009a\u0001\u0000\u0000\u0000\u0098\u0096\u0001"+ + "\u0000\u0000\u0000\u0098\u0099\u0001\u0000\u0000\u0000\u0099\u009b\u0001"+ + "\u0000\u0000\u0000\u009a\u0098\u0001\u0000\u0000\u0000\u009b\u009c\u0005"+ + "1\u0000\u0000\u009c\u00a5\u0001\u0000\u0000\u0000\u009d\u009e\u0003\u000e"+ + "\u0007\u0000\u009e\u00a0\u0005)\u0000\u0000\u009f\u00a1\u0005+\u0000\u0000"+ + "\u00a0\u009f\u0001\u0000\u0000\u0000\u00a0\u00a1\u0001\u0000\u0000\u0000"+ + "\u00a1\u00a2\u0001\u0000\u0000\u0000\u00a2\u00a3\u0005,\u0000\u0000\u00a3"+ + "\u00a5\u0001\u0000\u0000\u0000\u00a4\u0088\u0001\u0000\u0000\u0000\u00a4"+ + "\u008b\u0001\u0000\u0000\u0000\u00a4\u008c\u0001\u0000\u0000\u0000\u00a4"+ + "\u008d\u0001\u0000\u0000\u0000\u00a4\u009d\u0001\u0000\u0000\u0000\u00a5"+ + "\u00ae\u0001\u0000\u0000\u0000\u00a6\u00a7\n\u0004\u0000\u0000\u00a7\u00a8"+ + "\u0005\u001e\u0000\u0000\u00a8\u00ad\u0003\n\u0005\u0005\u00a9\u00aa\n"+ + "\u0003\u0000\u0000\u00aa\u00ab\u0005.\u0000\u0000\u00ab\u00ad\u0003\n"+ + "\u0005\u0004\u00ac\u00a6\u0001\u0000\u0000\u0000\u00ac\u00a9\u0001\u0000"+ + "\u0000\u0000\u00ad\u00b0\u0001\u0000\u0000\u0000\u00ae\u00ac\u0001\u0000"+ + "\u0000\u0000\u00ae\u00af\u0001\u0000\u0000\u0000\u00af\u000b\u0001\u0000"+ + "\u0000\u0000\u00b0\u00ae\u0001\u0000\u0000\u0000\u00b1\u00b3\u0003\u000e"+ + "\u0007\u0000\u00b2\u00b4\u0005+\u0000\u0000\u00b3\u00b2\u0001\u0000\u0000"+ + "\u0000\u00b3\u00b4\u0001\u0000\u0000\u0000\u00b4\u00b5\u0001\u0000\u0000"+ + "\u0000\u00b5\u00b6\u0005*\u0000\u0000\u00b6\u00b7\u0003R)\u0000\u00b7"+ + "\u00c0\u0001\u0000\u0000\u0000\u00b8\u00ba\u0003\u000e\u0007\u0000\u00b9"+ + "\u00bb\u0005+\u0000\u0000\u00ba\u00b9\u0001\u0000\u0000\u0000\u00ba\u00bb"+ + "\u0001\u0000\u0000\u0000\u00bb\u00bc\u0001\u0000\u0000\u0000\u00bc\u00bd"+ + "\u00050\u0000\u0000\u00bd\u00be\u0003R)\u0000\u00be\u00c0\u0001\u0000"+ + "\u0000\u0000\u00bf\u00b1\u0001\u0000\u0000\u0000\u00bf\u00b8\u0001\u0000"+ + "\u0000\u0000\u00c0\r\u0001\u0000\u0000\u0000\u00c1\u00c7\u0003\u0010\b"+ + "\u0000\u00c2\u00c3\u0003\u0010\b\u0000\u00c3\u00c4\u0003T*\u0000\u00c4"+ + "\u00c5\u0003\u0010\b\u0000\u00c5\u00c7\u0001\u0000\u0000\u0000\u00c6\u00c1"+ + "\u0001\u0000\u0000\u0000\u00c6\u00c2\u0001\u0000\u0000\u0000\u00c7\u000f"+ + 
"\u0001\u0000\u0000\u0000\u00c8\u00c9\u0006\b\uffff\uffff\u0000\u00c9\u00cd"+ + "\u0003\u0012\t\u0000\u00ca\u00cb\u0007\u0000\u0000\u0000\u00cb\u00cd\u0003"+ + "\u0010\b\u0003\u00cc\u00c8\u0001\u0000\u0000\u0000\u00cc\u00ca\u0001\u0000"+ + "\u0000\u0000\u00cd\u00d6\u0001\u0000\u0000\u0000\u00ce\u00cf\n\u0002\u0000"+ + "\u0000\u00cf\u00d0\u0007\u0001\u0000\u0000\u00d0\u00d5\u0003\u0010\b\u0003"+ + "\u00d1\u00d2\n\u0001\u0000\u0000\u00d2\u00d3\u0007\u0000\u0000\u0000\u00d3"+ + "\u00d5\u0003\u0010\b\u0002\u00d4\u00ce\u0001\u0000\u0000\u0000\u00d4\u00d1"+ + "\u0001\u0000\u0000\u0000\u00d5\u00d8\u0001\u0000\u0000\u0000\u00d6\u00d4"+ + "\u0001\u0000\u0000\u0000\u00d6\u00d7\u0001\u0000\u0000\u0000\u00d7\u0011"+ + "\u0001\u0000\u0000\u0000\u00d8\u00d6\u0001\u0000\u0000\u0000\u00d9\u00e1"+ + "\u00030\u0018\u0000\u00da\u00e1\u0003(\u0014\u0000\u00db\u00e1\u0003\u0014"+ + "\n\u0000\u00dc\u00dd\u0005\'\u0000\u0000\u00dd\u00de\u0003\n\u0005\u0000"+ + "\u00de\u00df\u00051\u0000\u0000\u00df\u00e1\u0001\u0000\u0000\u0000\u00e0"+ + "\u00d9\u0001\u0000\u0000\u0000\u00e0\u00da\u0001\u0000\u0000\u0000\u00e0"+ + "\u00db\u0001\u0000\u0000\u0000\u00e0\u00dc\u0001\u0000\u0000\u0000\u00e1"+ + "\u0013\u0001\u0000\u0000\u0000\u00e2\u00e3\u0003,\u0016\u0000\u00e3\u00ed"+ + "\u0005\'\u0000\u0000\u00e4\u00ee\u0005<\u0000\u0000\u00e5\u00ea\u0003"+ + "\n\u0005\u0000\u00e6\u00e7\u0005!\u0000\u0000\u00e7\u00e9\u0003\n\u0005"+ + "\u0000\u00e8\u00e6\u0001\u0000\u0000\u0000\u00e9\u00ec\u0001\u0000\u0000"+ + "\u0000\u00ea\u00e8\u0001\u0000\u0000\u0000\u00ea\u00eb\u0001\u0000\u0000"+ + "\u0000\u00eb\u00ee\u0001\u0000\u0000\u0000\u00ec\u00ea\u0001\u0000\u0000"+ + "\u0000\u00ed\u00e4\u0001\u0000\u0000\u0000\u00ed\u00e5\u0001\u0000\u0000"+ + "\u0000\u00ed\u00ee\u0001\u0000\u0000\u0000\u00ee\u00ef\u0001\u0000\u0000"+ + "\u0000\u00ef\u00f0\u00051\u0000\u0000\u00f0\u0015\u0001\u0000\u0000\u0000"+ + "\u00f1\u00f2\u0005\r\u0000\u0000\u00f2\u00f3\u0003\u0018\f\u0000\u00f3"+ + "\u0017\u0001\u0000\u0000\u0000\u00f4\u00f9\u0003\u001a\r\u0000\u00f5\u00f6"+ + "\u0005!\u0000\u0000\u00f6\u00f8\u0003\u001a\r\u0000\u00f7\u00f5\u0001"+ + "\u0000\u0000\u0000\u00f8\u00fb\u0001\u0000\u0000\u0000\u00f9\u00f7\u0001"+ + "\u0000\u0000\u0000\u00f9\u00fa\u0001\u0000\u0000\u0000\u00fa\u0019\u0001"+ + "\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000\u0000\u0000\u00fc\u0102\u0003"+ + "\n\u0005\u0000\u00fd\u00fe\u0003(\u0014\u0000\u00fe\u00ff\u0005 \u0000"+ + "\u0000\u00ff\u0100\u0003\n\u0005\u0000\u0100\u0102\u0001\u0000\u0000\u0000"+ + "\u0101\u00fc\u0001\u0000\u0000\u0000\u0101\u00fd\u0001\u0000\u0000\u0000"+ + "\u0102\u001b\u0001\u0000\u0000\u0000\u0103\u0104\u0005\u0006\u0000\u0000"+ + "\u0104\u0109\u0003&\u0013\u0000\u0105\u0106\u0005!\u0000\u0000\u0106\u0108"+ + "\u0003&\u0013\u0000\u0107\u0105\u0001\u0000\u0000\u0000\u0108\u010b\u0001"+ + "\u0000\u0000\u0000\u0109\u0107\u0001\u0000\u0000\u0000\u0109\u010a\u0001"+ + "\u0000\u0000\u0000\u010a\u010d\u0001\u0000\u0000\u0000\u010b\u0109\u0001"+ + "\u0000\u0000\u0000\u010c\u010e\u0003\u001e\u000f\u0000\u010d\u010c\u0001"+ + "\u0000\u0000\u0000\u010d\u010e\u0001\u0000\u0000\u0000\u010e\u001d\u0001"+ + "\u0000\u0000\u0000\u010f\u0110\u0005?\u0000\u0000\u0110\u0111\u0005F\u0000"+ + "\u0000\u0111\u0116\u0003&\u0013\u0000\u0112\u0113\u0005!\u0000\u0000\u0113"+ + "\u0115\u0003&\u0013\u0000\u0114\u0112\u0001\u0000\u0000\u0000\u0115\u0118"+ + "\u0001\u0000\u0000\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0116\u0117"+ + "\u0001\u0000\u0000\u0000\u0117\u0119\u0001\u0000\u0000\u0000\u0118\u0116"+ + 
"\u0001\u0000\u0000\u0000\u0119\u011a\u0005@\u0000\u0000\u011a\u001f\u0001"+ + "\u0000\u0000\u0000\u011b\u011c\u0005\u0004\u0000\u0000\u011c\u011d\u0003"+ + "\u0018\f\u0000\u011d!\u0001\u0000\u0000\u0000\u011e\u0120\u0005\u0010"+ + "\u0000\u0000\u011f\u0121\u0003\u0018\f\u0000\u0120\u011f\u0001\u0000\u0000"+ + "\u0000\u0120\u0121\u0001\u0000\u0000\u0000\u0121\u0124\u0001\u0000\u0000"+ + "\u0000\u0122\u0123\u0005\u001d\u0000\u0000\u0123\u0125\u0003\u0018\f\u0000"+ + "\u0124\u0122\u0001\u0000\u0000\u0000\u0124\u0125\u0001\u0000\u0000\u0000"+ + "\u0125#\u0001\u0000\u0000\u0000\u0126\u0127\u0005\b\u0000\u0000\u0127"+ + "\u012a\u0003\u0018\f\u0000\u0128\u0129\u0005\u001d\u0000\u0000\u0129\u012b"+ + "\u0003\u0018\f\u0000\u012a\u0128\u0001\u0000\u0000\u0000\u012a\u012b\u0001"+ + "\u0000\u0000\u0000\u012b%\u0001\u0000\u0000\u0000\u012c\u012d\u0007\u0002"+ + "\u0000\u0000\u012d\'\u0001\u0000\u0000\u0000\u012e\u0133\u0003,\u0016"+ + "\u0000\u012f\u0130\u0005#\u0000\u0000\u0130\u0132\u0003,\u0016\u0000\u0131"+ + "\u012f\u0001\u0000\u0000\u0000\u0132\u0135\u0001\u0000\u0000\u0000\u0133"+ + "\u0131\u0001\u0000\u0000\u0000\u0133\u0134\u0001\u0000\u0000\u0000\u0134"+ + ")\u0001\u0000\u0000\u0000\u0135\u0133\u0001\u0000\u0000\u0000\u0136\u013b"+ + "\u0003.\u0017\u0000\u0137\u0138\u0005#\u0000\u0000\u0138\u013a\u0003."+ + "\u0017\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013d\u0001\u0000"+ + "\u0000\u0000\u013b\u0139\u0001\u0000\u0000\u0000\u013b\u013c\u0001\u0000"+ + "\u0000\u0000\u013c+\u0001\u0000\u0000\u0000\u013d\u013b\u0001\u0000\u0000"+ + "\u0000\u013e\u013f\u0007\u0003\u0000\u0000\u013f-\u0001\u0000\u0000\u0000"+ + "\u0140\u0141\u0007\u0004\u0000\u0000\u0141/\u0001\u0000\u0000\u0000\u0142"+ + "\u016d\u0005,\u0000\u0000\u0143\u0144\u0003P(\u0000\u0144\u0145\u0005"+ + "A\u0000\u0000\u0145\u016d\u0001\u0000\u0000\u0000\u0146\u016d\u0003N\'"+ + "\u0000\u0147\u016d\u0003P(\u0000\u0148\u016d\u0003J%\u0000\u0149\u016d"+ + "\u0005/\u0000\u0000\u014a\u016d\u0003R)\u0000\u014b\u014c\u0005?\u0000"+ + "\u0000\u014c\u0151\u0003L&\u0000\u014d\u014e\u0005!\u0000\u0000\u014e"+ + "\u0150\u0003L&\u0000\u014f\u014d\u0001\u0000\u0000\u0000\u0150\u0153\u0001"+ + "\u0000\u0000\u0000\u0151\u014f\u0001\u0000\u0000\u0000\u0151\u0152\u0001"+ + "\u0000\u0000\u0000\u0152\u0154\u0001\u0000\u0000\u0000\u0153\u0151\u0001"+ + "\u0000\u0000\u0000\u0154\u0155\u0005@\u0000\u0000\u0155\u016d\u0001\u0000"+ + "\u0000\u0000\u0156\u0157\u0005?\u0000\u0000\u0157\u015c\u0003J%\u0000"+ + "\u0158\u0159\u0005!\u0000\u0000\u0159\u015b\u0003J%\u0000\u015a\u0158"+ + "\u0001\u0000\u0000\u0000\u015b\u015e\u0001\u0000\u0000\u0000\u015c\u015a"+ + "\u0001\u0000\u0000\u0000\u015c\u015d\u0001\u0000\u0000\u0000\u015d\u015f"+ + "\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000\u015f\u0160"+ + "\u0005@\u0000\u0000\u0160\u016d\u0001\u0000\u0000\u0000\u0161\u0162\u0005"+ + "?\u0000\u0000\u0162\u0167\u0003R)\u0000\u0163\u0164\u0005!\u0000\u0000"+ + "\u0164\u0166\u0003R)\u0000\u0165\u0163\u0001\u0000\u0000\u0000\u0166\u0169"+ + "\u0001\u0000\u0000\u0000\u0167\u0165\u0001\u0000\u0000\u0000\u0167\u0168"+ + "\u0001\u0000\u0000\u0000\u0168\u016a\u0001\u0000\u0000\u0000\u0169\u0167"+ + "\u0001\u0000\u0000\u0000\u016a\u016b\u0005@\u0000\u0000\u016b\u016d\u0001"+ + "\u0000\u0000\u0000\u016c\u0142\u0001\u0000\u0000\u0000\u016c\u0143\u0001"+ + "\u0000\u0000\u0000\u016c\u0146\u0001\u0000\u0000\u0000\u016c\u0147\u0001"+ + "\u0000\u0000\u0000\u016c\u0148\u0001\u0000\u0000\u0000\u016c\u0149\u0001"+ + 
"\u0000\u0000\u0000\u016c\u014a\u0001\u0000\u0000\u0000\u016c\u014b\u0001"+ + "\u0000\u0000\u0000\u016c\u0156\u0001\u0000\u0000\u0000\u016c\u0161\u0001"+ + "\u0000\u0000\u0000\u016d1\u0001\u0000\u0000\u0000\u016e\u016f\u0005\n"+ + "\u0000\u0000\u016f\u0170\u0005\u001b\u0000\u0000\u01703\u0001\u0000\u0000"+ + "\u0000\u0171\u0172\u0005\u000f\u0000\u0000\u0172\u0177\u00036\u001b\u0000"+ + "\u0173\u0174\u0005!\u0000\u0000\u0174\u0176\u00036\u001b\u0000\u0175\u0173"+ "\u0001\u0000\u0000\u0000\u0176\u0179\u0001\u0000\u0000\u0000\u0177\u0175"+ "\u0001\u0000\u0000\u0000\u0177\u0178\u0001\u0000\u0000\u0000\u01785\u0001"+ "\u0000\u0000\u0000\u0179\u0177\u0001\u0000\u0000\u0000\u017a\u017c\u0003"+ "\n\u0005\u0000\u017b\u017d\u0007\u0005\u0000\u0000\u017c\u017b\u0001\u0000"+ "\u0000\u0000\u017c\u017d\u0001\u0000\u0000\u0000\u017d\u0180\u0001\u0000"+ - "\u0000\u0000\u017e\u017f\u0005.\u0000\u0000\u017f\u0181\u0007\u0006\u0000"+ + "\u0000\u0000\u017e\u017f\u0005-\u0000\u0000\u017f\u0181\u0007\u0006\u0000"+ "\u0000\u0180\u017e\u0001\u0000\u0000\u0000\u0180\u0181\u0001\u0000\u0000"+ "\u0000\u01817\u0001\u0000\u0000\u0000\u0182\u0183\u0005\t\u0000\u0000"+ - "\u0183\u0188\u0003*\u0015\u0000\u0184\u0185\u0005\"\u0000\u0000\u0185"+ - "\u0187\u0003*\u0015\u0000\u0186\u0184\u0001\u0000\u0000\u0000\u0187\u018a"+ - "\u0001\u0000\u0000\u0000\u0188\u0186\u0001\u0000\u0000\u0000\u0188\u0189"+ - "\u0001\u0000\u0000\u0000\u0189\u0195\u0001\u0000\u0000\u0000\u018a\u0188"+ - "\u0001\u0000\u0000\u0000\u018b\u018c\u0005\f\u0000\u0000\u018c\u0191\u0003"+ - "*\u0015\u0000\u018d\u018e\u0005\"\u0000\u0000\u018e\u0190\u0003*\u0015"+ - "\u0000\u018f\u018d\u0001\u0000\u0000\u0000\u0190\u0193\u0001\u0000\u0000"+ - "\u0000\u0191\u018f\u0001\u0000\u0000\u0000\u0191\u0192\u0001\u0000\u0000"+ - "\u0000\u0192\u0195\u0001\u0000\u0000\u0000\u0193\u0191\u0001\u0000\u0000"+ - "\u0000\u0194\u0182\u0001\u0000\u0000\u0000\u0194\u018b\u0001\u0000\u0000"+ - "\u0000\u01959\u0001\u0000\u0000\u0000\u0196\u0197\u0005\u0002\u0000\u0000"+ - "\u0197\u019c\u0003*\u0015\u0000\u0198\u0199\u0005\"\u0000\u0000\u0199"+ - "\u019b\u0003*\u0015\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019b\u019e"+ - "\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000\u0000\u019c\u019d"+ - "\u0001\u0000\u0000\u0000\u019d;\u0001\u0000\u0000\u0000\u019e\u019c\u0001"+ - "\u0000\u0000\u0000\u019f\u01a0\u0005\r\u0000\u0000\u01a0\u01a5\u0003>"+ - "\u001f\u0000\u01a1\u01a2\u0005\"\u0000\u0000\u01a2\u01a4\u0003>\u001f"+ - "\u0000\u01a3\u01a1\u0001\u0000\u0000\u0000\u01a4\u01a7\u0001\u0000\u0000"+ - "\u0000\u01a5\u01a3\u0001\u0000\u0000\u0000\u01a5\u01a6\u0001\u0000\u0000"+ - "\u0000\u01a6=\u0001\u0000\u0000\u0000\u01a7\u01a5\u0001\u0000\u0000\u0000"+ - "\u01a8\u01a9\u0003*\u0015\u0000\u01a9\u01aa\u0005P\u0000\u0000\u01aa\u01ab"+ - "\u0003*\u0015\u0000\u01ab?\u0001\u0000\u0000\u0000\u01ac\u01ad\u0005\u0001"+ - "\u0000\u0000\u01ad\u01ae\u0003\u0012\t\u0000\u01ae\u01b0\u0003R)\u0000"+ - "\u01af\u01b1\u0003F#\u0000\u01b0\u01af\u0001\u0000\u0000\u0000\u01b0\u01b1"+ - "\u0001\u0000\u0000\u0000\u01b1A\u0001\u0000\u0000\u0000\u01b2\u01b3\u0005"+ - "\u0007\u0000\u0000\u01b3\u01b4\u0003\u0012\t\u0000\u01b4\u01b5\u0003R"+ - ")\u0000\u01b5C\u0001\u0000\u0000\u0000\u01b6\u01b7\u0005\u000b\u0000\u0000"+ - "\u01b7\u01b8\u0003(\u0014\u0000\u01b8E\u0001\u0000\u0000\u0000\u01b9\u01be"+ - "\u0003H$\u0000\u01ba\u01bb\u0005\"\u0000\u0000\u01bb\u01bd\u0003H$\u0000"+ - "\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bd\u01c0\u0001\u0000\u0000\u0000"+ - 
"\u01be\u01bc\u0001\u0000\u0000\u0000\u01be\u01bf\u0001\u0000\u0000\u0000"+ - "\u01bfG\u0001\u0000\u0000\u0000\u01c0\u01be\u0001\u0000\u0000\u0000\u01c1"+ - "\u01c2\u0003,\u0016\u0000\u01c2\u01c3\u0005!\u0000\u0000\u01c3\u01c4\u0003"+ - "0\u0018\u0000\u01c4I\u0001\u0000\u0000\u0000\u01c5\u01c6\u0007\u0007\u0000"+ - "\u0000\u01c6K\u0001\u0000\u0000\u0000\u01c7\u01ca\u0003N\'\u0000\u01c8"+ - "\u01ca\u0003P(\u0000\u01c9\u01c7\u0001\u0000\u0000\u0000\u01c9\u01c8\u0001"+ - "\u0000\u0000\u0000\u01caM\u0001\u0000\u0000\u0000\u01cb\u01cd\u0007\u0000"+ - "\u0000\u0000\u01cc\u01cb\u0001\u0000\u0000\u0000\u01cc\u01cd\u0001\u0000"+ - "\u0000\u0000\u01cd\u01ce\u0001\u0000\u0000\u0000\u01ce\u01cf\u0005\u001d"+ - "\u0000\u0000\u01cfO\u0001\u0000\u0000\u0000\u01d0\u01d2\u0007\u0000\u0000"+ - "\u0000\u01d1\u01d0\u0001\u0000\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000"+ - "\u0000\u01d2\u01d3\u0001\u0000\u0000\u0000\u01d3\u01d4\u0005\u001c\u0000"+ - "\u0000\u01d4Q\u0001\u0000\u0000\u0000\u01d5\u01d6\u0005\u001b\u0000\u0000"+ - "\u01d6S\u0001\u0000\u0000\u0000\u01d7\u01d8\u0007\b\u0000\u0000\u01d8"+ - "U\u0001\u0000\u0000\u0000\u01d9\u01da\u0005\u0005\u0000\u0000\u01da\u01db"+ - "\u0003X,\u0000\u01dbW\u0001\u0000\u0000\u0000\u01dc\u01dd\u0005@\u0000"+ - "\u0000\u01dd\u01de\u0003\u0002\u0001\u0000\u01de\u01df\u0005A\u0000\u0000"+ - "\u01dfY\u0001\u0000\u0000\u0000\u01e0\u01e1\u0005\u000f\u0000\u0000\u01e1"+ - "\u01e5\u0005`\u0000\u0000\u01e2\u01e3\u0005\u000f\u0000\u0000\u01e3\u01e5"+ - "\u0005a\u0000\u0000\u01e4\u01e0\u0001\u0000\u0000\u0000\u01e4\u01e2\u0001"+ - "\u0000\u0000\u0000\u01e5[\u0001\u0000\u0000\u0000\u01e6\u01ea\u0005\u0003"+ - "\u0000\u0000\u01e7\u01e9\u0003`0\u0000\u01e8\u01e7\u0001\u0000\u0000\u0000"+ - "\u01e9\u01ec\u0001\u0000\u0000\u0000\u01ea\u01e8\u0001\u0000\u0000\u0000"+ - "\u01ea\u01eb\u0001\u0000\u0000\u0000\u01eb\u01ed\u0001\u0000\u0000\u0000"+ - "\u01ec\u01ea\u0001\u0000\u0000\u0000\u01ed\u01f0\u0005V\u0000\u0000\u01ee"+ - "\u01ef\u0005T\u0000\u0000\u01ef\u01f1\u0003*\u0015\u0000\u01f0\u01ee\u0001"+ - "\u0000\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01fb\u0001"+ - "\u0000\u0000\u0000\u01f2\u01f3\u0005U\u0000\u0000\u01f3\u01f8\u0003^/"+ - "\u0000\u01f4\u01f5\u0005\"\u0000\u0000\u01f5\u01f7\u0003^/\u0000\u01f6"+ - "\u01f4\u0001\u0000\u0000\u0000\u01f7\u01fa\u0001\u0000\u0000\u0000\u01f8"+ - "\u01f6\u0001\u0000\u0000\u0000\u01f8\u01f9\u0001\u0000\u0000\u0000\u01f9"+ - "\u01fc\u0001\u0000\u0000\u0000\u01fa\u01f8\u0001\u0000\u0000\u0000\u01fb"+ - "\u01f2\u0001\u0000\u0000\u0000\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fc"+ - "]\u0001\u0000\u0000\u0000\u01fd\u01fe\u0003*\u0015\u0000\u01fe\u01ff\u0005"+ - "!\u0000\u0000\u01ff\u0201\u0001\u0000\u0000\u0000\u0200\u01fd\u0001\u0000"+ - "\u0000\u0000\u0200\u0201\u0001\u0000\u0000\u0000\u0201\u0202\u0001\u0000"+ - "\u0000\u0000\u0202\u0203\u0003*\u0015\u0000\u0203_\u0001\u0000\u0000\u0000"+ - "\u0204\u0205\u0005@\u0000\u0000\u0205\u0206\u0005f\u0000\u0000\u0206\u0207"+ - "\u0005e\u0000\u0000\u0207\u0208\u0005f\u0000\u0000\u0208\u0209\u0005A"+ - "\u0000\u0000\u0209a\u0001\u0000\u0000\u00004mt\u0083\u008f\u0098\u00a0"+ - "\u00a4\u00ac\u00ae\u00b3\u00ba\u00bf\u00c6\u00cc\u00d4\u00d6\u00e0\u00ea"+ - "\u00ed\u00f9\u0101\u0109\u010d\u0116\u0120\u0124\u012a\u0133\u013b\u0151"+ - "\u015c\u0167\u016c\u0177\u017c\u0180\u0188\u0191\u0194\u019c\u01a5\u01b0"+ - "\u01be\u01c9\u01cc\u01d1\u01e4\u01ea\u01f0\u01f8\u01fb\u0200"; + "\u0183\u0188\u0003*\u0015\u0000\u0184\u0185\u0005!\u0000\u0000\u0185\u0187"+ + 
"\u0003*\u0015\u0000\u0186\u0184\u0001\u0000\u0000\u0000\u0187\u018a\u0001"+ + "\u0000\u0000\u0000\u0188\u0186\u0001\u0000\u0000\u0000\u0188\u0189\u0001"+ + "\u0000\u0000\u0000\u01899\u0001\u0000\u0000\u0000\u018a\u0188\u0001\u0000"+ + "\u0000\u0000\u018b\u018c\u0005\u0002\u0000\u0000\u018c\u0191\u0003*\u0015"+ + "\u0000\u018d\u018e\u0005!\u0000\u0000\u018e\u0190\u0003*\u0015\u0000\u018f"+ + "\u018d\u0001\u0000\u0000\u0000\u0190\u0193\u0001\u0000\u0000\u0000\u0191"+ + "\u018f\u0001\u0000\u0000\u0000\u0191\u0192\u0001\u0000\u0000\u0000\u0192"+ + ";\u0001\u0000\u0000\u0000\u0193\u0191\u0001\u0000\u0000\u0000\u0194\u0195"+ + "\u0005\f\u0000\u0000\u0195\u019a\u0003>\u001f\u0000\u0196\u0197\u0005"+ + "!\u0000\u0000\u0197\u0199\u0003>\u001f\u0000\u0198\u0196\u0001\u0000\u0000"+ + "\u0000\u0199\u019c\u0001\u0000\u0000\u0000\u019a\u0198\u0001\u0000\u0000"+ + "\u0000\u019a\u019b\u0001\u0000\u0000\u0000\u019b=\u0001\u0000\u0000\u0000"+ + "\u019c\u019a\u0001\u0000\u0000\u0000\u019d\u019e\u0003*\u0015\u0000\u019e"+ + "\u019f\u0005O\u0000\u0000\u019f\u01a0\u0003*\u0015\u0000\u01a0?\u0001"+ + "\u0000\u0000\u0000\u01a1\u01a2\u0005\u0001\u0000\u0000\u01a2\u01a3\u0003"+ + "\u0012\t\u0000\u01a3\u01a5\u0003R)\u0000\u01a4\u01a6\u0003F#\u0000\u01a5"+ + "\u01a4\u0001\u0000\u0000\u0000\u01a5\u01a6\u0001\u0000\u0000\u0000\u01a6"+ + "A\u0001\u0000\u0000\u0000\u01a7\u01a8\u0005\u0007\u0000\u0000\u01a8\u01a9"+ + "\u0003\u0012\t\u0000\u01a9\u01aa\u0003R)\u0000\u01aaC\u0001\u0000\u0000"+ + "\u0000\u01ab\u01ac\u0005\u000b\u0000\u0000\u01ac\u01ad\u0003(\u0014\u0000"+ + "\u01adE\u0001\u0000\u0000\u0000\u01ae\u01b3\u0003H$\u0000\u01af\u01b0"+ + "\u0005!\u0000\u0000\u01b0\u01b2\u0003H$\u0000\u01b1\u01af\u0001\u0000"+ + "\u0000\u0000\u01b2\u01b5\u0001\u0000\u0000\u0000\u01b3\u01b1\u0001\u0000"+ + "\u0000\u0000\u01b3\u01b4\u0001\u0000\u0000\u0000\u01b4G\u0001\u0000\u0000"+ + "\u0000\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b6\u01b7\u0003,\u0016\u0000"+ + "\u01b7\u01b8\u0005 \u0000\u0000\u01b8\u01b9\u00030\u0018\u0000\u01b9I"+ + "\u0001\u0000\u0000\u0000\u01ba\u01bb\u0007\u0007\u0000\u0000\u01bbK\u0001"+ + "\u0000\u0000\u0000\u01bc\u01bf\u0003N\'\u0000\u01bd\u01bf\u0003P(\u0000"+ + "\u01be\u01bc\u0001\u0000\u0000\u0000\u01be\u01bd\u0001\u0000\u0000\u0000"+ + "\u01bfM\u0001\u0000\u0000\u0000\u01c0\u01c2\u0007\u0000\u0000\u0000\u01c1"+ + "\u01c0\u0001\u0000\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000\u0000\u01c2"+ + "\u01c3\u0001\u0000\u0000\u0000\u01c3\u01c4\u0005\u001c\u0000\u0000\u01c4"+ + "O\u0001\u0000\u0000\u0000\u01c5\u01c7\u0007\u0000\u0000\u0000\u01c6\u01c5"+ + "\u0001\u0000\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7\u01c8"+ + "\u0001\u0000\u0000\u0000\u01c8\u01c9\u0005\u001b\u0000\u0000\u01c9Q\u0001"+ + "\u0000\u0000\u0000\u01ca\u01cb\u0005\u001a\u0000\u0000\u01cbS\u0001\u0000"+ + "\u0000\u0000\u01cc\u01cd\u0007\b\u0000\u0000\u01cdU\u0001\u0000\u0000"+ + "\u0000\u01ce\u01cf\u0005\u0005\u0000\u0000\u01cf\u01d0\u0003X,\u0000\u01d0"+ + "W\u0001\u0000\u0000\u0000\u01d1\u01d2\u0005?\u0000\u0000\u01d2\u01d3\u0003"+ + "\u0002\u0001\u0000\u01d3\u01d4\u0005@\u0000\u0000\u01d4Y\u0001\u0000\u0000"+ + "\u0000\u01d5\u01d6\u0005\u000e\u0000\u0000\u01d6\u01da\u0005_\u0000\u0000"+ + "\u01d7\u01d8\u0005\u000e\u0000\u0000\u01d8\u01da\u0005`\u0000\u0000\u01d9"+ + "\u01d5\u0001\u0000\u0000\u0000\u01d9\u01d7\u0001\u0000\u0000\u0000\u01da"+ + "[\u0001\u0000\u0000\u0000\u01db\u01df\u0005\u0003\u0000\u0000\u01dc\u01de"+ + "\u0003`0\u0000\u01dd\u01dc\u0001\u0000\u0000\u0000\u01de\u01e1\u0001\u0000"+ + 
"\u0000\u0000\u01df\u01dd\u0001\u0000\u0000\u0000\u01df\u01e0\u0001\u0000"+ + "\u0000\u0000\u01e0\u01e2\u0001\u0000\u0000\u0000\u01e1\u01df\u0001\u0000"+ + "\u0000\u0000\u01e2\u01e5\u0005U\u0000\u0000\u01e3\u01e4\u0005S\u0000\u0000"+ + "\u01e4\u01e6\u0003*\u0015\u0000\u01e5\u01e3\u0001\u0000\u0000\u0000\u01e5"+ + "\u01e6\u0001\u0000\u0000\u0000\u01e6\u01f0\u0001\u0000\u0000\u0000\u01e7"+ + "\u01e8\u0005T\u0000\u0000\u01e8\u01ed\u0003^/\u0000\u01e9\u01ea\u0005"+ + "!\u0000\u0000\u01ea\u01ec\u0003^/\u0000\u01eb\u01e9\u0001\u0000\u0000"+ + "\u0000\u01ec\u01ef\u0001\u0000\u0000\u0000\u01ed\u01eb\u0001\u0000\u0000"+ + "\u0000\u01ed\u01ee\u0001\u0000\u0000\u0000\u01ee\u01f1\u0001\u0000\u0000"+ + "\u0000\u01ef\u01ed\u0001\u0000\u0000\u0000\u01f0\u01e7\u0001\u0000\u0000"+ + "\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1]\u0001\u0000\u0000\u0000"+ + "\u01f2\u01f3\u0003*\u0015\u0000\u01f3\u01f4\u0005 \u0000\u0000\u01f4\u01f6"+ + "\u0001\u0000\u0000\u0000\u01f5\u01f2\u0001\u0000\u0000\u0000\u01f5\u01f6"+ + "\u0001\u0000\u0000\u0000\u01f6\u01f7\u0001\u0000\u0000\u0000\u01f7\u01f8"+ + "\u0003*\u0015\u0000\u01f8_\u0001\u0000\u0000\u0000\u01f9\u01fa\u0005?"+ + "\u0000\u0000\u01fa\u01fb\u0005e\u0000\u0000\u01fb\u01fc\u0005d\u0000\u0000"+ + "\u01fc\u01fd\u0005e\u0000\u0000\u01fd\u01fe\u0005@\u0000\u0000\u01fea"+ + "\u0001\u0000\u0000\u00002mt\u0083\u008f\u0098\u00a0\u00a4\u00ac\u00ae"+ + "\u00b3\u00ba\u00bf\u00c6\u00cc\u00d4\u00d6\u00e0\u00ea\u00ed\u00f9\u0101"+ + "\u0109\u010d\u0116\u0120\u0124\u012a\u0133\u013b\u0151\u015c\u0167\u016c"+ + "\u0177\u017c\u0180\u0188\u0191\u019a\u01a5\u01b3\u01be\u01c1\u01c6\u01d9"+ + "\u01df\u01e5\u01ed\u01f0\u01f5"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 23a76afe41cff..7784e48a41efc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -60,7 +60,6 @@ import java.util.Set; import java.util.function.Function; -import static org.elasticsearch.common.logging.HeaderWarning.addWarning; import static org.elasticsearch.xpack.esql.plan.logical.Enrich.Mode; import static org.elasticsearch.xpack.ql.parser.ParserUtils.source; import static org.elasticsearch.xpack.ql.parser.ParserUtils.typedParsing; @@ -275,9 +274,6 @@ public PlanFactory visitRenameCommand(EsqlBaseParser.RenameCommandContext ctx) { @Override public PlanFactory visitKeepCommand(EsqlBaseParser.KeepCommandContext ctx) { - if (ctx.PROJECT() != null) { - addWarning("PROJECT command is no longer supported, please use KEEP instead"); - } var identifiers = ctx.qualifiedNamePattern(); List projections = new ArrayList<>(identifiers.size()); boolean hasSeenStar = false; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 4b908e815ffe3..ef93f60b4f1c6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -754,8 +754,9 @@ public void testKeepStarMvExpand() { } public void testUsageOfProject() { - 
processingCommand("project a"); - assertWarnings("PROJECT command is no longer supported, please use KEEP instead"); + String query = "from test | project foo, bar"; + ParsingException e = expectThrows(ParsingException.class, "Expected syntax error for " + query, () -> statement(query)); + assertThat(e.getMessage(), containsString("mismatched input 'project' expecting")); } public void testInputParams() { From 9b584aa1f2c48ee808b3edce2a9f6c085e714489 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Przemys=C5=82aw=20Witek?= Date: Tue, 6 Feb 2024 18:30:25 +0100 Subject: [PATCH 074/106] [Transform] Allow transforms to use PIT with remote clusters again (#105192) --- docs/changelog/105192.yaml | 6 ++ .../transforms/ClientTransformIndexer.java | 3 +- .../ClientTransformIndexerTests.java | 70 +++---------------- 3 files changed, 15 insertions(+), 64 deletions(-) create mode 100644 docs/changelog/105192.yaml diff --git a/docs/changelog/105192.yaml b/docs/changelog/105192.yaml new file mode 100644 index 0000000000000..b15d58ef40fe7 --- /dev/null +++ b/docs/changelog/105192.yaml @@ -0,0 +1,6 @@ +pr: 105192 +summary: Allow transforms to use PIT with remote clusters again +area: Transform +type: enhancement +issues: + - 104518 diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java index 55f0290c20a1c..1f9ec86ada8e2 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java @@ -449,8 +449,7 @@ private void injectPointInTimeIfNeeded( ActionListener> listener ) { SearchRequest searchRequest = namedSearchRequest.v2(); - // We explicitly disable PIT in the presence of remote clusters in the source due to huge PIT handles causing performance problems. - if (disablePit || searchRequest.indices().length == 0 || transformConfig.getSource().requiresRemoteCluster()) { + if (disablePit || searchRequest.indices().length == 0) { listener.onResponse(namedSearchRequest); return; } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java index fa8e867d77a49..1c6d1615cbb9c 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java @@ -293,8 +293,14 @@ public void testPitInjectionIfPitNotSupported() throws InterruptedException { } public void testDisablePit() throws InterruptedException { - // TransformConfigTests.randomTransformConfig never produces remote indices in the source, hence we are safe here. */ - TransformConfig config = TransformConfigTests.randomTransformConfig(); + TransformConfig.Builder configBuilder = new TransformConfig.Builder(TransformConfigTests.randomTransformConfig()); + if (randomBoolean()) { + // TransformConfigTests.randomTransformConfig never produces remote indices in the source. + // We need to explicitly set the remote index here for coverage. 
+            configBuilder.setSource(new SourceConfig("remote-cluster:remote-index"));
+        }
+        TransformConfig config = configBuilder.build();
+
         boolean pitEnabled = config.getSettings().getUsePit() == null || config.getSettings().getUsePit();
 
         try (var threadPool = createThreadPool()) {
@@ -354,66 +360,6 @@ public void testDisablePit() throws InterruptedException {
         }
     }
 
-    public void testDisablePitWhenThereIsRemoteIndexInSource() throws InterruptedException {
-        TransformConfig config = new TransformConfig.Builder(TransformConfigTests.randomTransformConfig())
-            // Remote index is configured within source
-            .setSource(new SourceConfig("remote-cluster:remote-index"))
-            .build();
-        boolean pitEnabled = config.getSettings().getUsePit() == null || config.getSettings().getUsePit();
-
-        try (var threadPool = createThreadPool()) {
-            final var client = new PitMockClient(threadPool, true);
-            MockClientTransformIndexer indexer = new MockClientTransformIndexer(
-                mock(ThreadPool.class),
-                new TransformServices(
-                    mock(IndexBasedTransformConfigManager.class),
-                    mock(TransformCheckpointService.class),
-                    mock(TransformAuditor.class),
-                    new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY, TimeValue.ZERO)
-                ),
-                mock(CheckpointProvider.class),
-                new AtomicReference<>(IndexerState.STOPPED),
-                null,
-                new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456")),
-                mock(TransformIndexerStats.class),
-                config,
-                null,
-                new TransformCheckpoint(
-                    "transform",
-                    Instant.now().toEpochMilli(),
-                    0L,
-                    Collections.emptyMap(),
-                    Instant.now().toEpochMilli()
-                ),
-                new TransformCheckpoint(
-                    "transform",
-                    Instant.now().toEpochMilli(),
-                    2L,
-                    Collections.emptyMap(),
-                    Instant.now().toEpochMilli()
-                ),
-                new SeqNoPrimaryTermAndIndex(1, 1, TransformInternalIndexConstants.LATEST_INDEX_NAME),
-                mock(TransformContext.class),
-                false
-            );
-
-            // Because remote index is configured within source, we expect PIT *not* being used regardless the transform settings
-            this.assertAsync(
-                listener -> indexer.doNextSearch(0, listener),
-                response -> assertNull(response.pointInTimeId())
-            );
-
-            // reverse the setting
-            indexer.applyNewSettings(new SettingsConfig.Builder().setUsePit(pitEnabled == false).build());
-
-            // Because remote index is configured within source, we expect PIT *not* being used regardless the transform settings
-            this.assertAsync(
-                listener -> indexer.doNextSearch(0, listener),
-                response -> assertNull(response.pointInTimeId())
-            );
-        }
-    }
-
     public void testHandlePitIndexNotFound() throws InterruptedException {
         // simulate a deleted index due to ILM
         try (var threadPool = createThreadPool()) {

From a7ca62de8eef4966ad0ed92d1d318d2b3d854615 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Tue, 6 Feb 2024 12:34:02 -0500
Subject: [PATCH 075/106] Document ESQL docs examples (#105197)

This adds some docs to the top of `docs.csv-spec` and
`docs-IT_tests_only.csv-spec` telling folks not to add more stuff there and
instead put new examples into whatever files they line up with. It also
shifts some things out of the file to "prime the pump" on cleaning it up.
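Editor's note (not part of the original patch): the move relies on AsciiDoc
tagged regions. A csv-spec test wraps the lines the docs need in tag comments,
and the reference page pulls that region in by tag, so the published example is
exactly what CI runs. A minimal sketch of the pairing, with a made-up file and
tag name (the expected result is taken from the `basic` test added below):

    myTag
    // tag::myTag[]
    FROM employees
    | SORT emp_no ASC
    | LIMIT 1
    // end::myTag[]
    | KEEP emp_no
    ;

    emp_no:integer
    10001
    ;

and, in the matching reference page:

    [source,esql]
    ----
    include::{esql-specs}/my-file.csv-spec[tag=myTag]
    ----

Only the lines between the tag comments end up in the docs; the trailing KEEP
and the expected-results block stay test-only.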
---
 .../esql/processing-commands/drop.asciidoc    |  4 +-
 .../esql/processing-commands/limit.asciidoc   |  2 +-
 .../resources/docs-IT_tests_only.csv-spec     | 13 +++++
 .../src/main/resources/docs.csv-spec          | 48 +++++--------------
 .../src/main/resources/drop.csv-spec          | 20 ++++++++
 .../src/main/resources/limit.csv-spec         | 16 +++++++
 6 files changed, 64 insertions(+), 39 deletions(-)
 create mode 100644 x-pack/plugin/esql/qa/testFixtures/src/main/resources/limit.csv-spec

diff --git a/docs/reference/esql/processing-commands/drop.asciidoc b/docs/reference/esql/processing-commands/drop.asciidoc
index 4787c5f137314..8f03141d5e05a 100644
--- a/docs/reference/esql/processing-commands/drop.asciidoc
+++ b/docs/reference/esql/processing-commands/drop.asciidoc
@@ -22,7 +22,7 @@ The `DROP` processing command removes one or more columns.
 
 [source,esql]
 ----
-include::{esql-specs}/docs.csv-spec[tag=dropheight]
+include::{esql-specs}/drop.csv-spec[tag=height]
 ----
 
 Rather than specify each column by name, you can use wildcards to drop all
@@ -30,5 +30,5 @@ columns with a name that matches a pattern:
 
 [source,esql]
 ----
-include::{esql-specs}/docs.csv-spec[tag=dropheightwithwildcard]
+include::{esql-specs}/drop.csv-spec[tag=heightWithWildcard]
 ----
diff --git a/docs/reference/esql/processing-commands/limit.asciidoc b/docs/reference/esql/processing-commands/limit.asciidoc
index 5f659fc493a75..4ccf3024a4c1e 100644
--- a/docs/reference/esql/processing-commands/limit.asciidoc
+++ b/docs/reference/esql/processing-commands/limit.asciidoc
@@ -43,5 +43,5 @@ settings:
 
 [source,esql]
 ----
-include::{esql-specs}/docs.csv-spec[tag=limit]
+include::{esql-specs}/limit.csv-spec[tag=basic]
 ----
diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec
index ee8e5e0d784ce..f4bf2333cae86 100644
--- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec
+++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec
@@ -1,3 +1,16 @@
+// This file contains any ESQL snippets from the docs that don't have a home
+// anywhere else. The Isle of Misfit Toys. When you need to add new examples
+// for the docs you should try to convert an existing test first. Just add
+// the comments in whatever file the test already lives in. If you have to
+// write a new test to make an example in the docs then put it in whatever
+// file matches its "theme" best. Put it next to similar tests. Not here.
+
+// Also! When Nik originally extracted examples from the docs to make them
+// testable he didn't spend a lot of time putting the docs into appropriate
+// files. He just made this one. He didn't put his toys away. We'd be better
+// off not adding to this strange toy-pile and instead moving things into
+// the appropriate files.
+ enrich // tag::enrich[] ROW language_code = "1" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec index 2c707a7a87c4f..71bb1e9f28723 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec @@ -1,39 +1,15 @@ -docsDropHeight -// tag::dropheight[] -FROM employees -| DROP height -// end::dropheight[] -| LIMIT 0; - -avg_worked_seconds:long | birth_date:date | emp_no:integer | first_name:keyword | gender:keyword | height.float:double | height.half_float:double | height.scaled_float:double | hire_date:date | is_rehired:boolean | job_positions:keyword | languages:integer | languages.byte:integer | languages.long:long | languages.short:integer | last_name:keyword | salary:integer | salary_change:double | salary_change.int:integer |salary_change.keyword:keyword |salary_change.long:long | still_hired:boolean -; - -docsDropHeightWithWildcard -// tag::dropheightwithwildcard[] -FROM employees -| DROP height* -// end::dropheightwithwildcard[] -| LIMIT 0; - -avg_worked_seconds:long | birth_date:date | emp_no:integer | first_name:keyword | gender:keyword | hire_date:date | is_rehired:boolean | job_positions:keyword | languages:integer | languages.byte:integer | languages.long:long | languages.short:integer | last_name:keyword | salary:integer | salary_change:double | salary_change.int:integer |salary_change.keyword:keyword |salary_change.long:long | still_hired:boolean -; - -docsLimit -// tag::limit[] -FROM employees -| SORT emp_no ASC -| LIMIT 5 -// end::limit[] -| KEEP emp_no -; - -emp_no:integer -10001 -10002 -10003 -10004 -10005 -; +// This file contains any ESQL snippets from the docs that don't have a home +// anywhere else. The Isle of Misfit Toys. When you need to add new examples +// for the docs you should try to convert an existing test first. Just add +// the comments in whatever file the test already lives in. If you have to +// write a new test to make an example in the docs then put it in whatever +// file matches its "theme" best. Put it next to similar tests. Not here. + +// Also! When Nik originally extracted examples from the docs to make them +// testable he didn't spend a lot of time putting the docs into appropriate +// files. He just made this one. He didn't put his toys away. We'd be better +// off not adding to this strange toy-pile and instead moving things into +// the appropriate files. 
docsKeep // tag::keep[] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec index 601b4f329f9d7..fcb145ee55cbf 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/drop.csv-spec @@ -1,3 +1,23 @@ +height +// tag::height[] +FROM employees +| DROP height +// end::height[] +| LIMIT 0; + +avg_worked_seconds:long | birth_date:date | emp_no:integer | first_name:keyword | gender:keyword | height.float:double | height.half_float:double | height.scaled_float:double | hire_date:date | is_rehired:boolean | job_positions:keyword | languages:integer | languages.byte:integer | languages.long:long | languages.short:integer | last_name:keyword | salary:integer | salary_change:double | salary_change.int:integer |salary_change.keyword:keyword |salary_change.long:long | still_hired:boolean +; + +heightWithWildcard +// tag::heightWithWildcard[] +FROM employees +| DROP height* +// end::heightWithWildcard[] +| LIMIT 0; + +avg_worked_seconds:long | birth_date:date | emp_no:integer | first_name:keyword | gender:keyword | hire_date:date | is_rehired:boolean | job_positions:keyword | languages:integer | languages.byte:integer | languages.long:long | languages.short:integer | last_name:keyword | salary:integer | salary_change:double | salary_change.int:integer |salary_change.keyword:keyword |salary_change.long:long | still_hired:boolean +; + sortWithLimitOne_DropHeight from employees | sort languages, emp_no | limit 1 | drop height*; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/limit.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/limit.csv-spec new file mode 100644 index 0000000000000..b13ed51e620c4 --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/limit.csv-spec @@ -0,0 +1,16 @@ +basic +// tag::basic[] +FROM employees +| SORT emp_no ASC +| LIMIT 5 +// end::basic[] +| KEEP emp_no +; + +emp_no:integer +10001 +10002 +10003 +10004 +10005 +; From 37498423728fc89a6b2900d09068ab00fe6953be Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 6 Feb 2024 17:45:06 +0000 Subject: [PATCH 076/106] Bump versions after 7.17.18 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 16 ++++++++++++++++ .buildkite/pipelines/periodic.yml | 10 ++++++++++ .ci/bwcVersions | 1 + .ci/snapshotBwcVersions | 2 +- .../src/main/java/org/elasticsearch/Version.java | 1 + .../org/elasticsearch/TransportVersions.csv | 1 + .../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 32 insertions(+), 2 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 00af4d006d0ac..102542d58bb0b 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.18", "8.12.1", "8.13.0"] + BWC_VERSION: ["7.17.19", "8.12.1", "8.13.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 30d4f4486dad5..6f1c42654084f 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -1121,6 +1121,22 @@ steps: env: BWC_VERSION: 7.17.18 + - label: "{{matrix.image}} / 7.17.19 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh 
-Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.19 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 7.17.19 + - label: "{{matrix.image}} / 8.0.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.0 timeout_in_minutes: 300 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 44007272f8954..6af4955e64a3b 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -682,6 +682,16 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 7.17.18 + - label: 7.17.19 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.17.19#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 7.17.19 - label: 8.0.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.0.0#bwcTest timeout_in_minutes: 300 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 3871c6d06fd23..af3ade0589a5c 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -67,6 +67,7 @@ BWC_VERSION: - "7.17.16" - "7.17.17" - "7.17.18" + - "7.17.19" - "8.0.0" - "8.0.1" - "8.1.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 36c0eb5a2999c..bc9f62a267871 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,4 @@ BWC_VERSION: - - "7.17.18" + - "7.17.19" - "8.12.1" - "8.13.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 3a119d31d5dc3..2ff622605202f 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -118,6 +118,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_7_17_16 = new Version(7_17_16_99); public static final Version V_7_17_17 = new Version(7_17_17_99); public static final Version V_7_17_18 = new Version(7_17_18_99); + public static final Version V_7_17_19 = new Version(7_17_19_99); public static final Version V_8_0_0 = new Version(8_00_00_99); public static final Version V_8_0_1 = new Version(8_00_01_99); diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index ad2c89d18b70a..1d02879789c22 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -65,6 +65,7 @@ 7.17.15,7171599 7.17.16,7171699 7.17.17,7171799 +7.17.18,7171899 8.0.0,8000099 8.0.1,8000199 8.1.0,8010099 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index 644cc362d3d4c..a351fe2b38c1d 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -65,6 +65,7 @@ 7.17.15,7171599 7.17.16,7171699 7.17.17,7171799 +7.17.18,7171899 8.0.0,8000099 8.0.1,8000199 8.1.0,8010099 From fef2af3b0705bf174611e9de8ac20629a901bfdf Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 6 Feb 2024 17:50:06 +0000 Subject: [PATCH 077/106] Prune changelogs after 7.17.18 
release --- docs/changelog/103511.yaml | 6 ------ docs/changelog/103741.yaml | 5 ----- docs/changelog/103817.yaml | 6 ------ docs/changelog/103819.yaml | 5 ----- docs/changelog/104087.yaml | 13 ------------- docs/changelog/104145.yaml | 6 ------ docs/changelog/104198.yaml | 5 ----- docs/changelog/104281.yaml | 5 ----- docs/changelog/104288.yaml | 6 ------ docs/changelog/104289.yaml | 6 ------ docs/changelog/104314.yaml | 5 ----- docs/changelog/104418.yaml | 6 ------ docs/changelog/104523.yaml | 5 ----- docs/changelog/104585.yaml | 6 ------ docs/changelog/104586.yaml | 6 ------ docs/changelog/104591.yaml | 5 ----- docs/changelog/104600.yaml | 5 ----- docs/changelog/104606.yaml | 6 ------ docs/changelog/104722.yaml | 6 ------ docs/changelog/104802.yaml | 5 ----- docs/changelog/104808.yaml | 5 ----- docs/changelog/104832.yaml | 6 ------ docs/changelog/104891.yaml | 6 ------ docs/changelog/104904.yaml | 5 ----- 24 files changed, 140 deletions(-) delete mode 100644 docs/changelog/103511.yaml delete mode 100644 docs/changelog/103741.yaml delete mode 100644 docs/changelog/103817.yaml delete mode 100644 docs/changelog/103819.yaml delete mode 100644 docs/changelog/104087.yaml delete mode 100644 docs/changelog/104145.yaml delete mode 100644 docs/changelog/104198.yaml delete mode 100644 docs/changelog/104281.yaml delete mode 100644 docs/changelog/104288.yaml delete mode 100644 docs/changelog/104289.yaml delete mode 100644 docs/changelog/104314.yaml delete mode 100644 docs/changelog/104418.yaml delete mode 100644 docs/changelog/104523.yaml delete mode 100644 docs/changelog/104585.yaml delete mode 100644 docs/changelog/104586.yaml delete mode 100644 docs/changelog/104591.yaml delete mode 100644 docs/changelog/104600.yaml delete mode 100644 docs/changelog/104606.yaml delete mode 100644 docs/changelog/104722.yaml delete mode 100644 docs/changelog/104802.yaml delete mode 100644 docs/changelog/104808.yaml delete mode 100644 docs/changelog/104832.yaml delete mode 100644 docs/changelog/104891.yaml delete mode 100644 docs/changelog/104904.yaml diff --git a/docs/changelog/103511.yaml b/docs/changelog/103511.yaml deleted file mode 100644 index 20a48df914832..0000000000000 --- a/docs/changelog/103511.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103511 -summary: Downsampling supports `date_histogram` with tz -area: Downsampling -type: bug -issues: - - 101309 diff --git a/docs/changelog/103741.yaml b/docs/changelog/103741.yaml deleted file mode 100644 index 6771ddd329f46..0000000000000 --- a/docs/changelog/103741.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103741 -summary: Limit nesting depth in Exception XContent -area: Infra/Resiliency -type: bug -issues: [] diff --git a/docs/changelog/103817.yaml b/docs/changelog/103817.yaml deleted file mode 100644 index ff8978f1d3776..0000000000000 --- a/docs/changelog/103817.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 103817 -summary: Fix deleting index during snapshot finalization -area: Snapshot/Restore -type: bug -issues: - - 101029 diff --git a/docs/changelog/103819.yaml b/docs/changelog/103819.yaml deleted file mode 100644 index ef6e717572cc5..0000000000000 --- a/docs/changelog/103819.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103819 -summary: Add retry logic for 500 and 503 errors for OpenAI -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/104087.yaml b/docs/changelog/104087.yaml deleted file mode 100644 index 614e2d0de7e58..0000000000000 --- a/docs/changelog/104087.yaml +++ /dev/null @@ -1,13 +0,0 @@ -pr: 104087 -summary: Deprecate machine learning on Intel 
macOS -area: Machine Learning -type: deprecation -issues: [] -deprecation: - title: Deprecate machine learning on Intel macOS - area: Packaging - details: The machine learning plugin will be permanently disabled on macOS x86_64 - in new minor versions released from December 2024 onwards. - impact: To continue to use machine learning functionality on macOS please switch to - an arm64 machine (Apple silicon). Alternatively, it will still be possible to run - Elasticsearch with machine learning enabled in a Docker container on macOS x86_64. diff --git a/docs/changelog/104145.yaml b/docs/changelog/104145.yaml deleted file mode 100644 index 41dd1f97ebe8b..0000000000000 --- a/docs/changelog/104145.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104145 -summary: Fix _alias/ returning non-matching data streams -area: Data streams -type: bug -issues: - - 96589 diff --git a/docs/changelog/104198.yaml b/docs/changelog/104198.yaml deleted file mode 100644 index 0b5b4680c2d88..0000000000000 --- a/docs/changelog/104198.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104198 -summary: "[Connector API] Fix bug in configuration validation parser" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/104281.yaml b/docs/changelog/104281.yaml deleted file mode 100644 index 087e91d83ab3b..0000000000000 --- a/docs/changelog/104281.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104281 -summary: Data streams fix failure store delete -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/104288.yaml b/docs/changelog/104288.yaml deleted file mode 100644 index 67f54e37cf9dc..0000000000000 --- a/docs/changelog/104288.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104288 -summary: Don't throw error for remote shards that open PIT filtered out -area: Search -type: bug -issues: - - 102596 diff --git a/docs/changelog/104289.yaml b/docs/changelog/104289.yaml deleted file mode 100644 index 9df8f8ecd4add..0000000000000 --- a/docs/changelog/104289.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104289 -summary: Better handling of async processor failures -area: Ingest Node -type: bug -issues: - - 101921 diff --git a/docs/changelog/104314.yaml b/docs/changelog/104314.yaml deleted file mode 100644 index a17e810a2c023..0000000000000 --- a/docs/changelog/104314.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104314 -summary: "[LTR] `FieldValueExtrator` - Checking if fetched values is empty" -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/104418.yaml b/docs/changelog/104418.yaml deleted file mode 100644 index d27b66cebea87..0000000000000 --- a/docs/changelog/104418.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104418 -summary: Fix `routing_path` when template has multiple `path_match` and multi-fields -area: TSDB -type: bug -issues: - - 104400 diff --git a/docs/changelog/104523.yaml b/docs/changelog/104523.yaml deleted file mode 100644 index d9e7d207dc23a..0000000000000 --- a/docs/changelog/104523.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104523 -summary: "ESQL: Allow grouping by null blocks" -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/104585.yaml b/docs/changelog/104585.yaml deleted file mode 100644 index 8c2b20fe54d0c..0000000000000 --- a/docs/changelog/104585.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104585 -summary: Ingest correctly handle upsert operations and drop processors together -area: Ingest Node -type: bug -issues: - - 36746 diff --git a/docs/changelog/104586.yaml b/docs/changelog/104586.yaml deleted file mode 100644 index db1d01c22eff6..0000000000000 --- a/docs/changelog/104586.yaml +++ 
/dev/null @@ -1,6 +0,0 @@ -pr: 104586 -summary: Reduce the number of Evals `ReplaceMissingFieldWithNull` creates -area: ES|QL -type: bug -issues: - - 104583 diff --git a/docs/changelog/104591.yaml b/docs/changelog/104591.yaml deleted file mode 100644 index 0bd054385753f..0000000000000 --- a/docs/changelog/104591.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104591 -summary: Avoid execute ESQL planning on refresh thread -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/104600.yaml b/docs/changelog/104600.yaml deleted file mode 100644 index 5337116ba37bc..0000000000000 --- a/docs/changelog/104600.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104600 -summary: "[Profiling] Query in parallel on content nodes" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/104606.yaml b/docs/changelog/104606.yaml deleted file mode 100644 index f419c21e0a17d..0000000000000 --- a/docs/changelog/104606.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104606 -summary: Fix bug when `latest` transform is used together with `from` parameter -area: Transform -type: bug -issues: - - 104543 diff --git a/docs/changelog/104722.yaml b/docs/changelog/104722.yaml deleted file mode 100644 index ed9f2d41ff908..0000000000000 --- a/docs/changelog/104722.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104722 -summary: Avoid possible datafeed infinite loop with filtering aggregations -area: Machine Learning -type: bug -issues: - - 104699 diff --git a/docs/changelog/104802.yaml b/docs/changelog/104802.yaml deleted file mode 100644 index d535318043ca2..0000000000000 --- a/docs/changelog/104802.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104802 -summary: "[Connectors API] Fix bug when triggering a sync job via API" -area: Application -type: bug -issues: [] diff --git a/docs/changelog/104808.yaml b/docs/changelog/104808.yaml deleted file mode 100644 index 7682db085c7a9..0000000000000 --- a/docs/changelog/104808.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104808 -summary: Fix lost headers with chunked responses -area: Network -type: bug -issues: [] diff --git a/docs/changelog/104832.yaml b/docs/changelog/104832.yaml deleted file mode 100644 index 89f837b1c3475..0000000000000 --- a/docs/changelog/104832.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104832 -summary: Limit concurrent shards per node for ESQL -area: ES|QL -type: bug -issues: - - 103666 diff --git a/docs/changelog/104891.yaml b/docs/changelog/104891.yaml deleted file mode 100644 index 690f2c4b11f88..0000000000000 --- a/docs/changelog/104891.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 104891 -summary: "ESQL: Fix `SearchStats#count(String)` to count values not rows" -area: ES|QL -type: bug -issues: - - 104795 diff --git a/docs/changelog/104904.yaml b/docs/changelog/104904.yaml deleted file mode 100644 index 07e22feb144ed..0000000000000 --- a/docs/changelog/104904.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 104904 -summary: Improve `CANNOT_REBALANCE_CAN_ALLOCATE` explanation -area: Allocation -type: bug -issues: [] From 47828788d97bd8f1e7f6ef2978d7596706d795c0 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 6 Feb 2024 18:00:58 +0000 Subject: [PATCH 078/106] [ML] Fix handling surrogate pairs in the XLM Roberta tokenizer (#105183) UTF16 represents some characters as surrogate pairs which are represented by 2 UTF16 characters, often emojis are encoded as surrogate pairs. 
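As a standalone illustration (not part of this patch; the class and method names below are hypothetical), a minimal Java sketch of the pitfall: counting UTF-8 bytes per UTF-16 char overestimates the encoded length of a surrogate pair.

    import java.nio.charset.StandardCharsets;

    public class SurrogatePairBytesDemo {
        // Buggy per-char byte count: treats each UTF-16 char as a standalone code point.
        static int naiveUtf8Length(CharSequence s) {
            int bytes = 0;
            for (int i = 0; i < s.length(); i++) {
                char c = s.charAt(i);
                if (c < 0x80) bytes += 1;        // ASCII
                else if (c < 0x800) bytes += 2;
                else bytes += 3;                 // each surrogate half lands here: 3 + 3 = 6
            }
            return bytes;
        }

        public static void main(String[] args) {
            String emoji = "😀"; // U+1F600, stored as the surrogate pair \uD83D\uDE00
            System.out.println("utf16Chars=" + emoji.length()                        // 2
                + " naiveBytes=" + naiveUtf8Length(emoji)                             // 6 (wrong)
                + " actualBytes=" + emoji.getBytes(StandardCharsets.UTF_8).length     // 4 (correct)
                + " isSurrogatePair=" + Character.isSurrogatePair(emoji.charAt(0), emoji.charAt(1))); // true
        }
    }

Detecting the pair with Character.isSurrogatePair and measuring both chars together yields the correct four-byte UTF-8 length, which is the approach the change below takes.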
This PR fixes an error in calculating the number of bytes required to convert a UTF16 string to UTF8 as surrogate pairs were not processed properly --- docs/changelog/105183.yaml | 7 +++ .../PrecompiledCharMapNormalizer.java | 24 ++++++----- .../nlp/tokenizers/TokenizerUtils.java | 13 ------ .../nlp/tokenizers/UnigramTokenizer.java | 12 ++++-- .../PrecompiledCharMapNormalizerTests.java | 11 +++++ .../tokenizers/XLMRobertaTokenizerTests.java | 43 +++++++++++++++++++ 6 files changed, 82 insertions(+), 28 deletions(-) create mode 100644 docs/changelog/105183.yaml diff --git a/docs/changelog/105183.yaml b/docs/changelog/105183.yaml new file mode 100644 index 0000000000000..04ec159cf02d0 --- /dev/null +++ b/docs/changelog/105183.yaml @@ -0,0 +1,7 @@ +pr: 105183 +summary: Fix handling surrogate pairs in the XLM Roberta tokenizer +area: Machine Learning +type: bug +issues: + - 104626 + - 104981 diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java index c6eca511b895e..836c9a78f19d9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java @@ -33,8 +33,6 @@ import java.util.Optional; import java.util.OptionalInt; -import static org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizerUtils.numUtf8Bytes; - /** * This is custom normalizer logic purpose built to replicate the logic in DoubleArray Trie System (darts) * object and the sentence piece normalizer. @@ -179,19 +177,14 @@ Reader normalize(CharSequence str) { b.setText(str); // We iterate the whole string, so b.first() is always `0` int startIter = b.first(); - int codePointPos = 0; CharsRefBuilder strBuilder = new CharsRefBuilder(); strBuilder.grow(strBytes.length); int bytePos = 0; int normalizedCharPos = 0; // Keep in mind, these break points aren't necessarily surrogate pairs, but also codepoints that contain a combining mark for (int end = b.next(); end != BreakIterator.DONE; startIter = end, end = b.next()) { - int byteLen = 0; - int numCp = Character.codePointCount(str, startIter, end); - for (int i = codePointPos; i < numCp + codePointPos; i++) { - byteLen += numUtf8Bytes(strCp[i]); - } - codePointPos += numCp; + int byteLen = UnicodeUtil.calcUTF16toUTF8Length(str, startIter, end - startIter); + // The trie only go up to a depth of 5 bytes. // So even looking at it for graphemes (with combining, surrogate, etc.) that are 6+ bytes in length is useless. if (byteLen < 6) { @@ -209,8 +202,12 @@ Reader normalize(CharSequence str) { } } int charByteIndex = 0; - for (int i = startIter; i < end; i++) { - int utf8CharBytes = numUtf8Bytes(str.charAt(i)); + int i = startIter; + while (i < end) { + boolean isSurrogatePair = (i + 1 < end && Character.isSurrogatePair(str.charAt(i), str.charAt(i + 1))); + int numUtf16Chars = isSurrogatePair ? 
2 : 1; + + int utf8CharBytes = UnicodeUtil.calcUTF16toUTF8Length(str, i, numUtf16Chars); Optional maybeSubStr = normalizePart(strBytes, charByteIndex + bytePos, utf8CharBytes); if (maybeSubStr.isPresent()) { BytesRef subStr = maybeSubStr.get(); @@ -226,8 +223,13 @@ Reader normalize(CharSequence str) { } else { normalizedCharPos += 1; strBuilder.append(str.charAt(i)); + if (isSurrogatePair) { + strBuilder.append(str.charAt(i + 1)); + } } charByteIndex += utf8CharBytes; + + i = i + numUtf16Chars; } bytePos += byteLen; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizerUtils.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizerUtils.java index 3221144983945..9fc2f31514650 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizerUtils.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizerUtils.java @@ -64,19 +64,6 @@ static LinkedList splitOutNeverSplit(CharSequence input, CharTri return bigTokens; } - static int numUtf8Bytes(int c) { - if (c < 128) { - return 1; - } - if (c < 2048) { - return 2; - } - if (c < 65536) { - return 3; - } - return 4; - } - public record CharSequenceRef(CharSequence wrapped, int offset, int len) implements CharSequence { public int getOffset() { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java index 58526a53dca6a..acb1f6c038ef9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java @@ -30,7 +30,6 @@ import java.util.Optional; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizerUtils.numUtf8Bytes; import static org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizerUtils.splitOutNeverSplit; /** @@ -256,9 +255,14 @@ List tokenize(CharSequence inputSequence, IntToIntFuncti BestPathNode[] bestPathNodes = new BestPathNode[numBytes + 1]; int bytePos = 0; int charPos = 0; - while (bytePos < numBytes) { + while (charPos < inputSequence.length()) { double bestScoreTillHere = bestPathNodes[bytePos] == null ? 0 : bestPathNodes[bytePos].score; - int mblen = numUtf8Bytes(inputSequence.charAt(charPos)); + + boolean isSurrogatePair = (charPos + 1 < inputSequence.length() + && Character.isSurrogatePair(inputSequence.charAt(charPos), inputSequence.charAt(charPos + 1))); + int numUtf16Chars = isSurrogatePair ? 
2 : 1; + int mblen = UnicodeUtil.calcUTF16toUTF8Length(inputSequence, charPos, numUtf16Chars); + boolean hasSingleNode = false; // Find the matching prefixes, incrementing by the chars, each time for (BytesRef prefix : vocabTrie.matchingPrefixes(new BytesRef(normalizedByteBuffer, bytePos, numBytes - bytePos))) { @@ -295,7 +299,7 @@ List tokenize(CharSequence inputSequence, IntToIntFuncti } // Move our prefix search to the next char bytePos += mblen; - ++charPos; + charPos = charPos + numUtf16Chars; } int endsAtBytes = numBytes; int endsAtChars = inputSequence.length(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java index 53e55538e9d18..d542b97eee192 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizerTests.java @@ -46,6 +46,17 @@ public void testWhitespaceScenario() throws IOException { assertNormalization("​​από", parsed, " από"); } + public void testSurrogatePairScenario() throws IOException { + PrecompiledCharMapNormalizer.Config parsed = loadTestCharMap(); + assertNormalization("🇸🇴", parsed, "🇸🇴"); + assertNormalization("🇸🇴", parsed, "\uD83C\uDDF8\uD83C\uDDF4"); + } + + public void testEmoji() throws IOException { + PrecompiledCharMapNormalizer.Config parsed = loadTestCharMap(); + assertNormalization("😀", parsed, "😀"); + } + private void assertNormalization(String input, PrecompiledCharMapNormalizer.Config config, String expected) throws IOException { PrecompiledCharMapNormalizer normalizer = new PrecompiledCharMapNormalizer( config.offsets(), diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java index c49a8fea15780..1ac0f0d536988 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java @@ -17,6 +17,8 @@ import java.util.stream.Collectors; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; public class XLMRobertaTokenizerTests extends ESTestCase { @@ -37,6 +39,8 @@ public class XLMRobertaTokenizerTests extends ESTestCase { "▁little", "▁red", "▁car", + "▁😀", + "▁🇸🇴", "", "." 
); @@ -57,6 +61,8 @@ public class XLMRobertaTokenizerTests extends ESTestCase { -11.451579093933105, -10.858806610107422, -10.214239120483398, + -10.230172157287598, + -9.451579093933105, 0.0, -3.0 ); @@ -81,6 +87,43 @@ public void testTokenize() throws IOException { } } + public void testSurrogatePair() throws IOException { + try ( + XLMRobertaTokenizer tokenizer = XLMRobertaTokenizer.builder( + TEST_CASE_VOCAB, + TEST_CASE_SCORES, + new XLMRobertaTokenization(false, null, Tokenization.Truncate.NONE, -1) + ).build() + ) { + TokenizationResult.Tokens tokenization = tokenizer.tokenize("😀", Tokenization.Truncate.NONE, -1, 0).get(0); + assertThat(tokenStrings(tokenization.tokens().get(0)), contains("▁\uD83D\uDE00")); + + tokenization = tokenizer.tokenize("Elasticsearch 😀", Tokenization.Truncate.NONE, -1, 0).get(0); + assertThat(tokenStrings(tokenization.tokens().get(0)), contains("▁Ela", "stic", "search", "▁\uD83D\uDE00")); + + tokenization = tokenizer.tokenize("Elasticsearch 😀 fun", Tokenization.Truncate.NONE, -1, 0).get(0); + assertThat(tokenStrings(tokenization.tokens().get(0)), contains("▁Ela", "stic", "search", "▁\uD83D\uDE00", "▁fun")); + } + } + + public void testMultiByteEmoji() throws IOException { + try ( + XLMRobertaTokenizer tokenizer = XLMRobertaTokenizer.builder( + TEST_CASE_VOCAB, + TEST_CASE_SCORES, + new XLMRobertaTokenization(false, null, Tokenization.Truncate.NONE, -1) + ).build() + ) { + TokenizationResult.Tokens tokenization = tokenizer.tokenize("🇸🇴", Tokenization.Truncate.NONE, -1, 0).get(0); + assertThat(tokenStrings(tokenization.tokens().get(0)), contains("▁🇸🇴")); + assertThat(tokenization.tokenIds()[0], not(equalTo(3))); // not the unknown token + + tokenization = tokenizer.tokenize("🏁", Tokenization.Truncate.NONE, -1, 0).get(0); + assertThat(tokenStrings(tokenization.tokens().get(0)), contains("▁🏁")); + assertThat(tokenization.tokenIds()[0], equalTo(3)); // the unknown token (not in the vocabulary) + } + } + public void testTokenizeWithNeverSplit() throws IOException { try ( XLMRobertaTokenizer tokenizer = XLMRobertaTokenizer.builder( From 0fb5dee75bf7bf5fbffa8f6105cde042601d2ae2 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Tue, 6 Feb 2024 13:03:53 -0500 Subject: [PATCH 079/106] [ES|QL] Add function log(base, value) (#104913) Add a new scalar function log --- docs/reference/esql/functions/log.asciidoc | 48 +++ .../esql/functions/math-functions.asciidoc | 2 + .../esql/functions/signature/log.svg | 1 + .../esql/functions/types/log.asciidoc | 20 ++ .../src/main/resources/math.csv-spec | 303 +++++++++++++++++- .../src/main/resources/show.csv-spec | 4 +- .../scalar/math/LogConstantEvaluator.java | 119 +++++++ .../function/scalar/math/LogEvaluator.java | 143 +++++++++ .../function/EsqlFunctionRegistry.java | 2 + .../expression/function/scalar/math/Log.java | 138 ++++++++ .../xpack/esql/io/stream/PlanNamedTypes.java | 14 + .../function/scalar/math/LogTests.java | 212 ++++++++++++ 12 files changed, 1004 insertions(+), 2 deletions(-) create mode 100644 docs/reference/esql/functions/log.asciidoc create mode 100644 docs/reference/esql/functions/signature/log.svg create mode 100644 docs/reference/esql/functions/types/log.asciidoc create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java create mode 100644 x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java create mode 
100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java create mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java diff --git a/docs/reference/esql/functions/log.asciidoc b/docs/reference/esql/functions/log.asciidoc new file mode 100644 index 0000000000000..79ea72898bc2f --- /dev/null +++ b/docs/reference/esql/functions/log.asciidoc @@ -0,0 +1,48 @@ +[discrete] +[[esql-log]] +=== `LOG` + +*Syntax* + +[source,esql] +---- +LOG([base,] value) +---- + +*Parameters* + +`base`:: +Numeric expression. If `null`, the function returns `null`. The base is an optional input parameter. If a base is not provided, this function returns the natural logarithm (base e) of a value. + +`value`:: +Numeric expression. If `null`, the function returns `null`. + +*Description* + +Returns the logarithm of a value to a base. The input can be any numeric value, the return value is always a double. + +Logs of zero, negative numbers, infinites and base of one return `null` as well as a warning. + +*Supported types* + +include::types/log.asciidoc[] + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=log] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=log-result] +|=== + +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=logUnary] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=logUnary-result] +|=== diff --git a/docs/reference/esql/functions/math-functions.asciidoc b/docs/reference/esql/functions/math-functions.asciidoc index 21131ae9074d7..0ddf7412db2a1 100644 --- a/docs/reference/esql/functions/math-functions.asciidoc +++ b/docs/reference/esql/functions/math-functions.asciidoc @@ -18,6 +18,7 @@ * <> * <> * <> +* <> * <> * <> * <> @@ -40,6 +41,7 @@ include::cos.asciidoc[] include::cosh.asciidoc[] include::e.asciidoc[] include::floor.asciidoc[] +include::log.asciidoc[] include::log10.asciidoc[] include::pi.asciidoc[] include::pow.asciidoc[] diff --git a/docs/reference/esql/functions/signature/log.svg b/docs/reference/esql/functions/signature/log.svg new file mode 100644 index 0000000000000..39a9a7e8dc52e --- /dev/null +++ b/docs/reference/esql/functions/signature/log.svg @@ -0,0 +1 @@ +LOG(base,value) \ No newline at end of file diff --git a/docs/reference/esql/functions/types/log.asciidoc b/docs/reference/esql/functions/types/log.asciidoc new file mode 100644 index 0000000000000..d72ea848c349f --- /dev/null +++ b/docs/reference/esql/functions/types/log.asciidoc @@ -0,0 +1,20 @@ +[%header.monospaced.styled,format=dsv,separator=|] +|=== +base | value | result +double | double | double +double | integer | double +double | long | double +double | unsigned_long | double +integer | double | double +integer | integer | double +integer | long | double +integer | unsigned_long | double +long | double | double +long | integer | double +long | long | double +long | unsigned_long | double +unsigned_long | double | double +unsigned_long | integer | double +unsigned_long | long | double +unsigned_long | unsigned_long | double +|=== diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index d7a6f488e1a6e..ff61ee39d9f99 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -210,6 +210,307 @@ salary:integer | s:double 73851 | 1330201 ; +log#[skip:-8.12.99,reason:new scalar function added in 8.13] +// tag::log[] +ROW base = 2.0, value = 8.0 +| EVAL s = LOG(base, value) +// end::log[] +; + +// tag::log-result[] +base: double | value: double | s:double +2.0 | 8.0 |3.0 +// end::log-result[] +; + +logofNegativeValue#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = 2.0, value = -2 +| EVAL s = LOG(base, value); + +warning:Line 2:12: evaluation of [LOG(base, value)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:12: java.lang.ArithmeticException: Log of non-positive number + +base: double | value: integer | s:double +2.0 | -2 | null +; + +logofNegativeBase#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = -2, value = 2.0 +| EVAL s = LOG(base, value); + +warning:Line 2:12: evaluation of [LOG(base, value)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:12: java.lang.ArithmeticException: Log of non-positive number + +base: integer | value: double | s:double +-2 | 2.0 | null +; + +logofBaseOne#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = 1, value = 2 +| EVAL s = LOG(base, value); + +warning:Line 2:12: evaluation of [LOG(base, value)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:12: java.lang.ArithmeticException: Log of base 1 + +base: integer | value: integer | s:double +1 | 2 | null +; + +logofZero#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = 2.0, value = 0.0 +| EVAL s = LOG(base, value); + +warning:Line 2:12: evaluation of [LOG(base, value)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:12: java.lang.ArithmeticException: Log of non-positive number + +base:double | value:double | s:double +2.0 | 0.0 |null +; + +logofNegativeZero#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = 2.0, value = -0.0 +| EVAL s = LOG(base, value); + +warning:Line 2:12: evaluation of [LOG(base, value)] failed, treating result as null. Only first 20 failures recorded. 
+warning:Line 2:12: java.lang.ArithmeticException: Log of non-positive number + +base:double | value:double | s:double +2.0 | -0.0 |null +; + +logofIntLong#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = 10, value = to_long(1000000000000) +| EVAL s = LOG(base, value); + +base:integer | value:long | s:double +10 | 1000000000000 | 12 +; + +logofLongInt#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = to_long(1000000000000), value = 10 +| EVAL s = LOG(base, value); + +base:long | value:integer | s:double +1000000000000 | 10 | 0.08333333333333333 +; + +logofLongLong#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = to_long(1000000000000), value = to_long(1000000000000) +| EVAL s = LOG(base, value); + +base:long | value:long | s:double +1000000000000 | 1000000000000 |1.0 +; + +logofLongDouble#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = to_long(1000000000000), value = 10.0 +| EVAL s = LOG(base, value); + +base:long | value:double | s:double +1000000000000 | 10.0 | 0.08333333333333333 +; + +logofDoubleLong#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = 10.0, value = to_long(1000000000000) +| EVAL s = LOG(base, value); + +base:double | value:long | s:double +10.0 | 1000000000000 | 12 +; + +logofLongUnsignedLong#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = to_long(1000000000000), value = to_ul(1000000000000000000) +| EVAL s = LOG(base, value); + +base:long | value:UNSIGNED_LONG | s:double +1000000000000 | 1000000000000000000 | 1.5 +; + +logofUnsignedLongLong#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = to_ul(1000000000000000000), value = to_long(1000000000000) +| EVAL s = LOG(base, value); + +base:UNSIGNED_LONG | value:long | s:double +1000000000000000000 | 1000000000000 | 0.6666666666666666 +; + +logofIntUnsignedLong#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = 10, value = to_ul(1000000000000000000) +| EVAL s = LOG(base, value); + +base:integer | value:UNSIGNED_LONG | s:double +10 | 1000000000000000000 | 18.0 +; + +logofUnsignedLongInt#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = to_ul(1000000000000000000), value = 10 +| EVAL s = LOG(base, value); + +base:UNSIGNED_LONG | value:integer | s:double +1000000000000000000 | 10 | 0.05555555555555555 +; + +logofUnsignedLongUnsignedLong#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = to_ul(1000000000000000000), value = to_ul(1000000000000000000) +| EVAL s = LOG(base, value); + +base:UNSIGNED_LONG | value:UNSIGNED_LONG | s:double +1000000000000000000 | 1000000000000000000 | 1.0 +; + +logofUnsignedLongDouble#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = to_ul(1000000000000000000), value = 1000000000.0 +| EVAL s = LOG(base, value); + +base:UNSIGNED_LONG | value:double | s:double +1000000000000000000 | 1.0E9 | 0.5 +; + +logofDoubleUnsignedLong#[skip:-8.12.99,reason:new scalar function added in 8.13] +row base = 10.0, value = to_ul(1000000000000000000) +| EVAL s = LOG(base, value); + +base:double | value:UNSIGNED_LONG | s:double +10.0 | 1000000000000000000 | 18.0 +; + +logofInt#[skip:-8.12.99,reason:new scalar function added in 8.13] +// tag::logUnary[] +row value = 100 +| EVAL s = LOG(value); +// end::logUnary[] + +// tag::logUnary-result[] +value: integer | s:double +100 | 4.605170185988092 +// end::logUnary-result[] +; + +logofLong#[skip:-8.12.99,reason:new scalar function added in 8.13] +row value = 
to_long(1000000000000) +| EVAL s = LOG(value); + +value: long | s:double +1000000000000 | 27.631021115928547 +; + +logofUnsignedLong#[skip:-8.12.99,reason:new scalar function added in 8.13] +row value = to_ul(1000000000000000000) +| EVAL s = LOG(value); + +value: unsigned_long | s:double +1000000000000000000 | 41.44653167389282 +; + +logofDouble#[skip:-8.12.99,reason:new scalar function added in 8.13] +row value = 1000000000000.0 +| EVAL s = LOG(value); + +value: double | s:double +1000000000000.0 | 27.631021115928547 +; + +logofNegativeUnary#[skip:-8.12.99,reason:new scalar function added in 8.13] +row value = -1 +| EVAL s = LOG(value); + +warning:Line 2:12: evaluation of [LOG(value)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:12: java.lang.ArithmeticException: Log of non-positive number + +value: integer | s:double +-1 | null +; + +logofZeroUnary#[skip:-8.12.99,reason:new scalar function added in 8.13] +row value = 0 +| EVAL s = LOG(value); + +warning:Line 2:12: evaluation of [LOG(value)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:12: java.lang.ArithmeticException: Log of non-positive number + +value: integer | s:double +0 | null +; + +logofRefs#[skip:-8.12.99,reason:new scalar function added in 8.13] +from employees +| EVAL l1 = LOG(languages, salary) +| EVAL l2 = LOG(avg_worked_seconds) +| EVAL l3 = l1 + l2 +| sort emp_no +| keep emp_no, languages, salary, avg_worked_seconds, l1, l2, l3 +| limit 5; + +warning:Line 2:13: evaluation of [LOG(languages, salary)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:13: java.lang.ArithmeticException: Log of base 1 + +emp_no:integer | languages:integer | salary:integer | avg_worked_seconds:long | l1:double | l2:double | l3:double +10001 | 2 | 57305 | 268728049 | 15.806373402659007 | 19.409210455930772 | 35.21558385858978 +10002 | 5 | 56371 | 328922887 | 6.797224072039991 | 19.61133389523871 | 26.408557967278703 +10003 | 4 | 61805 | 200296405 | 7.957717967928651 | 19.115308852397096 | 27.073026820325747 +10004 | 5 | 36174 | 311267831 | 6.5215910639530374 | 19.55616429245569 | 26.07775535640873 +10005 | 1 | 63528 | 244294991 | null | 19.313887032538442 | null +; + +logofRefExps#[skip:-8.12.99,reason:new scalar function added in 8.13] +from employees +| EVAL base = languages * 2 +| EVAL l1 = LOG(base, salary * 2) +| EVAL l2 = LOG(avg_worked_seconds + 10000) / 2 +| EVAL l3 = l1 + l2 +| sort emp_no +| keep emp_no, languages, salary, avg_worked_seconds, l1, l2, l3 +| limit 5; + +emp_no:integer | languages:integer | salary:integer | avg_worked_seconds:long | l1:double | l2:double | l3:double +10001 | 2 | 57305 | 268728049 | 8.403186701329505 | 9.704623833790084 | 18.10781053511959 +10002 | 5 | 56371 | 328922887 | 5.052085734770665 | 9.805682148519608 | 14.857767883290272 +10003 | 4 | 61805 | 200296405 | 5.638478645285767 | 9.55767938857962 | 15.196158033865387 +10004 | 5 | 36174 | 311267831 | 4.8594265299129775 | 9.778098209306606 | 14.637524739219582 +10005 | 1 | 63528 | 244294991 | 16.955104980216557 | 9.656963982909355 | 26.612068963125914 +; + +logofSort#[skip:-8.12.99,reason:new scalar function added in 8.13] +from employees +| EVAL l1 = LOG(languages, salary) +| EVAL l2 = LOG(avg_worked_seconds) +| EVAL l3 = l1 + l2 +| sort l1 ASC nulls last, l2 DESC nulls last +| keep l1, l2, emp_no, languages, salary, avg_worked_seconds, l3 +| limit 5; + +warning:Line 2:13: evaluation of [LOG(languages, salary)] failed, treating result as null. 
Only first 20 failures recorded. +warning:Line 2:13: java.lang.ArithmeticException: Log of base 1 + +l1:double | l2:double | emp_no:integer | languages:integer | salary:integer | avg_worked_seconds:long | l3:double +6.300030441266983 | 19.782340222815456 | 10015 | 5 | 25324 | 390266432 | 26.08237066408244 +6.315083118944484 | 19.132836869278762 | 10035 | 5 | 25945 | 203838153 | 25.447919988223248 +6.428086413501791 | 19.294546217598917 | 10011 | 5 | 31120 | 239615525 | 25.72263263110071 +6.443409317219002 | 19.704129344210816 | 10066 | 5 | 31897 | 360906451 | 26.14753866142982 +6.450671492193451 | 19.538385836155943 | 10087 | 5 | 32272 | 305782871 | 25.989057328349393 +; + +logofRenameSort#[skip:-8.12.99,reason:new scalar function added in 8.13] +from employees +| RENAME languages as base1 +| EVAL l1 = LOG(base1, salary) +| EVAL l2 = LOG(avg_worked_seconds) +| EVAL l3 = l1 + l2 +| sort l1 ASC nulls first, l2 DESC nulls first +| keep l1, l2, emp_no, base1, salary, avg_worked_seconds, l3 +| limit 5; + +warning:Line 3:13: evaluation of [LOG(base1, salary)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 3:13: java.lang.ArithmeticException: Log of base 1 + +l1:double | l2:double | emp_no:integer | base1:integer | salary:integer | avg_worked_seconds:long | l3:double +null | 19.774989878141827 | 10044 | 1 | 39728 | 387408356 | null +null | 19.739867371666865 | 10027 | null | 73851 | 374037782 | null +null | 19.73791867352969 | 10020 | null | 40031 | 373309605 | null +null | 19.732442265367403 | 10025 | null | 47411 | 371270797 | null +null | 19.72282600331636 | 10024 | null | 64675 | 367717671 | null +; + log10 // tag::log10[] ROW d = 1000.0 @@ -238,7 +539,7 @@ warning:Line 1:24: evaluation of [log10(d)] failed, treating result as null. Onl warning:Line 1:24: java.lang.ArithmeticException: Log of non-positive number d:double | s:double -0.0 | null +0.0 | null ; log10ofNegativeZero diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index 16a4ebf8fb03e..8d86563f78938 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -38,6 +38,7 @@ greatest |"integer|long|double|boolean|keyword|text|ip|version g least |"integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |"integer|long|double|boolean|keyword|text|ip|version" | "Returns the minimum value from many columns." | [false, false] | true | false left |"keyword left(str:keyword|text, length:integer)" |[str, length] |["keyword|text", "integer"] |["", ""] |keyword | "Return the substring that extracts length chars from the string starting from the left." | [false, false] | false | false length |"integer length(str:keyword|text)" |str |"keyword|text" | "" |integer | "Returns the character length of a string." | false | false | false +log |"double log(?base:integer|unsigned_long|long|double, value:integer|unsigned_long|long|double)" |[base, value] |["integer|unsigned_long|long|double", "integer|unsigned_long|long|double"]| ["", ""] |double | "Returns the logarithm of a value to a base." 
| [true, false] | false | false log10 |"double log10(n:double|integer|long|unsigned_long)" |n |"double|integer|long|unsigned_long" | "" |double | "Returns the log base 10." | false | false | false ltrim |"keyword|text ltrim(str:keyword|text)" |str |"keyword|text" | "" |"keyword|text" |Removes leading whitespaces from a string.| false | false | false max |"double|integer|long max(field:double|integer|long)" |field |"double|integer|long" | "" |"double|integer|long" | "The maximum value of a numeric field." | false | false | true @@ -134,6 +135,7 @@ double e() "integer|long|double|boolean|keyword|text|ip|version least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" "keyword left(str:keyword|text, length:integer)" "integer length(str:keyword|text)" +"double log(?base:integer|unsigned_long|long|double, value:integer|unsigned_long|long|double)" "double log10(n:double|integer|long|unsigned_long)" "keyword|text ltrim(str:keyword|text)" "double|integer|long max(field:double|integer|long)" @@ -218,5 +220,5 @@ countFunctions#[skip:-8.12.99] show functions | stats a = count(*), b = count(*), c = count(*) | mv_expand c; a:long | b:long | c:long -89 | 89 | 89 +90 | 90 | 90 ; diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java new file mode 100644 index 0000000000000..ff814b530b108 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogConstantEvaluator.java @@ -0,0 +1,119 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Log}. + * This class is generated. Do not edit it. 
+ */ +public final class LogConstantEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator value; + + private final DriverContext driverContext; + + public LogConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator value, + DriverContext driverContext) { + this.warnings = new Warnings(source); + this.value = value; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (DoubleBlock valueBlock = (DoubleBlock) value.eval(page)) { + DoubleVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + return eval(page.getPositionCount(), valueBlock); + } + return eval(page.getPositionCount(), valueVector); + } + } + + public DoubleBlock eval(int positionCount, DoubleBlock valueBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valueBlock.getValueCount(p) != 1) { + if (valueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendDouble(Log.process(valueBlock.getDouble(valueBlock.getFirstValueIndex(p)))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public DoubleBlock eval(int positionCount, DoubleVector valueVector) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendDouble(Log.process(valueVector.getDouble(p))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "LogConstantEvaluator[" + "value=" + value + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(value); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory value; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value) { + this.source = source; + this.value = value; + } + + @Override + public LogConstantEvaluator get(DriverContext context) { + return new LogConstantEvaluator(source, value.get(context), context); + } + + @Override + public String toString() { + return "LogConstantEvaluator[" + "value=" + value + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java new file mode 100644 index 0000000000000..7fcfb37483bb7 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogEvaluator.java @@ -0,0 +1,143 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. 
+package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.expression.function.Warnings; +import org.elasticsearch.xpack.ql.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link Log}. + * This class is generated. Do not edit it. + */ +public final class LogEvaluator implements EvalOperator.ExpressionEvaluator { + private final Warnings warnings; + + private final EvalOperator.ExpressionEvaluator base; + + private final EvalOperator.ExpressionEvaluator value; + + private final DriverContext driverContext; + + public LogEvaluator(Source source, EvalOperator.ExpressionEvaluator base, + EvalOperator.ExpressionEvaluator value, DriverContext driverContext) { + this.warnings = new Warnings(source); + this.base = base; + this.value = value; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (DoubleBlock baseBlock = (DoubleBlock) base.eval(page)) { + try (DoubleBlock valueBlock = (DoubleBlock) value.eval(page)) { + DoubleVector baseVector = baseBlock.asVector(); + if (baseVector == null) { + return eval(page.getPositionCount(), baseBlock, valueBlock); + } + DoubleVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + return eval(page.getPositionCount(), baseBlock, valueBlock); + } + return eval(page.getPositionCount(), baseVector, valueVector); + } + } + } + + public DoubleBlock eval(int positionCount, DoubleBlock baseBlock, DoubleBlock valueBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (baseBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (baseBlock.getValueCount(p) != 1) { + if (baseBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + if (valueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valueBlock.getValueCount(p) != 1) { + if (valueBlock.getValueCount(p) > 1) { + warnings.registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendDouble(Log.process(baseBlock.getDouble(baseBlock.getFirstValueIndex(p)), valueBlock.getDouble(valueBlock.getFirstValueIndex(p)))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public DoubleBlock eval(int positionCount, DoubleVector baseVector, DoubleVector valueVector) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendDouble(Log.process(baseVector.getDouble(p), valueVector.getDouble(p))); + } catch (ArithmeticException e) { + warnings.registerException(e); + result.appendNull(); + } + } + 
return result.build(); + } + } + + @Override + public String toString() { + return "LogEvaluator[" + "base=" + base + ", value=" + value + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(base, value); + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory base; + + private final EvalOperator.ExpressionEvaluator.Factory value; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory base, + EvalOperator.ExpressionEvaluator.Factory value) { + this.source = source; + this.base = base; + this.value = value; + } + + @Override + public LogEvaluator get(DriverContext context) { + return new LogEvaluator(source, base.get(context), value.get(context), context); + } + + @Override + public String toString() { + return "LogEvaluator[" + "base=" + base + ", value=" + value + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index fa00ec5430657..eb44f96db069b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -53,6 +53,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; import org.elasticsearch.xpack.esql.expression.function.scalar.math.E; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Floor; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log10; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; @@ -136,6 +137,7 @@ private FunctionDefinition[][] functions() { def(E.class, E::new, "e"), def(Floor.class, Floor::new, "floor"), def(Greatest.class, Greatest::new, "greatest"), + def(Log.class, Log::new, "log"), def(Log10.class, Log10::new, "log10"), def(Least.class, Least::new, "least"), def(Pi.class, Pi::new, "pi"), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java new file mode 100644 index 0000000000000..04aed20d12d79 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log.java @@ -0,0 +1,138 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.function.OptionalArgument; +import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; +import org.elasticsearch.xpack.ql.expression.gen.script.ScriptTemplate; +import org.elasticsearch.xpack.ql.tree.NodeInfo; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.function.Function; + +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isNumeric; + +public class Log extends ScalarFunction implements OptionalArgument, EvaluatorMapper { + + private final Expression base, value; + + @FunctionInfo(returnType = "double", description = "Returns the logarithm of a value to a base.") + public Log( + Source source, + @Param(name = "base", type = { "integer", "unsigned_long", "long", "double" }, optional = true) Expression base, + @Param(name = "value", type = { "integer", "unsigned_long", "long", "double" }) Expression value + ) { + super(source, value != null ? Arrays.asList(base, value) : Arrays.asList(base)); + this.value = value != null ? value : base; + this.base = value != null ? base : null; + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + + if (base != null) { + TypeResolution resolution = isNumeric(base, sourceText(), FIRST); + if (resolution.unresolved()) { + return resolution; + } + } + + return isNumeric(value, sourceText(), base != null ? SECOND : FIRST); + } + + @Override + public boolean foldable() { + return (base == null || base.foldable()) && value.foldable(); + } + + @Override + public Object fold() { + return EvaluatorMapper.super.fold(); + } + + @Evaluator(extraName = "Constant", warnExceptions = { ArithmeticException.class }) + static double process(double value) throws ArithmeticException { + if (value <= 0d) { + throw new ArithmeticException("Log of non-positive number"); + } + return Math.log(value); + } + + @Evaluator(warnExceptions = { ArithmeticException.class }) + static double process(double base, double value) throws ArithmeticException { + if (base <= 0d || value <= 0d) { + throw new ArithmeticException("Log of non-positive number"); + } + if (base == 1d) { + throw new ArithmeticException("Log of base 1"); + } + return Math.log10(value) / Math.log10(base); + } + + @Override + public final Expression replaceChildren(List newChildren) { + return new Log(source(), newChildren.get(0), newChildren.size() > 1 ? newChildren.get(1) : null); + } + + @Override + protected NodeInfo info() { + Expression b = base != null ? base : value; + Expression v = base != null ? 
value : null; + return NodeInfo.create(this, Log::new, b, v); + } + + @Override + public DataType dataType() { + return DataTypes.DOUBLE; + } + + @Override + public ScriptTemplate asScript() { + throw new UnsupportedOperationException("functions do not support scripting"); + } + + @Override + public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { + var valueEval = Cast.cast(source(), value.dataType(), DataTypes.DOUBLE, toEvaluator.apply(value)); + if (base != null) { + var baseEval = Cast.cast(source(), base.dataType(), DataTypes.DOUBLE, toEvaluator.apply(base)); + return new LogEvaluator.Factory(source(), baseEval, valueEval); + } + return new LogConstantEvaluator.Factory(source(), valueEval); + } + + @Override + public int hashCode() { + return Objects.hash(base, value); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + Log other = (Log) obj; + return Objects.equals(other.base, base) && Objects.equals(other.value, value); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index 277b101c53fe7..95892ac42e587 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -76,6 +76,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cosh; import org.elasticsearch.xpack.esql.expression.function.scalar.math.E; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Floor; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Log10; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pi; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; @@ -372,6 +373,7 @@ public static List namedTypeEntries() { of(ScalarFunction.class, E.class, PlanNamedTypes::writeNoArgScalar, PlanNamedTypes::readNoArgScalar), of(ScalarFunction.class, Greatest.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), of(ScalarFunction.class, Least.class, PlanNamedTypes::writeVararg, PlanNamedTypes::readVarag), + of(ScalarFunction.class, Log.class, PlanNamedTypes::writeLog, PlanNamedTypes::readLog), of(ScalarFunction.class, Now.class, PlanNamedTypes::writeNow, PlanNamedTypes::readNow), of(ScalarFunction.class, Pi.class, PlanNamedTypes::writeNoArgScalar, PlanNamedTypes::readNoArgScalar), of(ScalarFunction.class, Round.class, PlanNamedTypes::writeRound, PlanNamedTypes::readRound), @@ -1811,4 +1813,16 @@ static void writeDissectParser(PlanStreamOutput out, Parser dissectParser) throw out.writeString(dissectParser.pattern()); out.writeString(dissectParser.appendSeparator()); } + + static Log readLog(PlanStreamInput in) throws IOException { + return new Log(in.readSource(), in.readExpression(), in.readOptionalNamed(Expression.class)); + } + + static void writeLog(PlanStreamOutput out, Log log) throws IOException { + out.writeSource(log.source()); + List fields = log.children(); + assert fields.size() == 1 || fields.size() == 2; + out.writeExpression(fields.get(0)); + out.writeOptionalWriteable(fields.size() == 2 ? 
o -> out.writeExpression(fields.get(1)) : null); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java new file mode 100644 index 0000000000000..9e74172323564 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogTests.java @@ -0,0 +1,212 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; + +import java.util.List; +import java.util.function.Supplier; + +public class LogTests extends AbstractScalarFunctionTestCase { + public LogTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // Positive base > 1, value >= 1, + List suppliers = TestCaseSupplier.forBinaryCastingToDouble( + "LogEvaluator", + "base", + "value", + (b, l) -> Math.log10(l) / Math.log10(b), + 2d, + Double.POSITIVE_INFINITY, + 1d, + Double.POSITIVE_INFINITY, + List.of() + ); + + // Positive natural logarithm + suppliers.addAll( + TestCaseSupplier.forUnaryCastingToDouble( + "LogConstantEvaluator", + "value", + Math::log, + Math.nextUp(1d), + Double.POSITIVE_INFINITY, + List.of() + ) + ); + + TestCaseSupplier.forUnaryDouble( + suppliers, + "LogConstantEvaluator[value=Attribute[channel=0]]", + DataTypes.DOUBLE, + Math::log, + Math.nextUp(0d), + Math.nextDown(1d), + List.of() + ); + + // Positive 0 < base < 1, 0 < value < 1 + suppliers.addAll( + TestCaseSupplier.forBinaryCastingToDouble( + "LogEvaluator", + "base", + "value", + (b, l) -> Math.log10(l) / Math.log10(b), + List.of( + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(0d), DataTypes.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextDown(1d), DataTypes.DOUBLE) + ), + List.of( + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(0d), DataTypes.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextDown(1d), DataTypes.DOUBLE) + ), + List.of() + ) + ); + + // Negative base <=0, + suppliers.addAll( + TestCaseSupplier.forBinaryCastingToDouble( + "LogEvaluator", + "base", + "value", + (b, l) -> null, + Double.NEGATIVE_INFINITY, + 0d, + 1d, + Double.POSITIVE_INFINITY, + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: java.lang.ArithmeticException: Log of non-positive number" + ) + ) + ); + + // Negative value <=0, + suppliers.addAll( + TestCaseSupplier.forBinaryCastingToDouble( + "LogEvaluator", + "base", + "value", + (b, l) -> null, + 2d, + Double.POSITIVE_INFINITY, + Double.NEGATIVE_INFINITY, + 0d, + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.ArithmeticException: Log of non-positive number" + ) + ) + ); + + // Negative base = 1 + suppliers.addAll( + TestCaseSupplier.forBinaryCastingToDouble( + "LogEvaluator", + "base", + "value", + (b, l) -> null, + 1d, + 1d, + 1d, + Double.POSITIVE_INFINITY, + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.ArithmeticException: Log of base 1" + ) + ) + ); + + // Negative 0 < base < 1, value > 1 + suppliers.addAll( + TestCaseSupplier.forBinaryCastingToDouble( + "LogEvaluator", + "base", + "value", + (b, l) -> Math.log10(l) / Math.log10(b), + List.of( + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(0d), DataTypes.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextDown(1d), DataTypes.DOUBLE) + ), + List.of( + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(1d), DataTypes.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("", () -> Double.MAX_VALUE, DataTypes.DOUBLE) + ), + List.of() + ) + ); + + // Negative base > 1, 0 < value < 1 + suppliers.addAll( + TestCaseSupplier.forBinaryCastingToDouble( + "LogEvaluator", + "base", + "value", + (b, l) -> Math.log10(l) / Math.log10(b), + List.of( + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(1d), DataTypes.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("", () -> Double.MAX_VALUE, DataTypes.DOUBLE) + ), + List.of( + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextUp(0d), DataTypes.DOUBLE), + new TestCaseSupplier.TypedDataSupplier("", () -> Math.nextDown(1d), DataTypes.DOUBLE) + ), + List.of() + ) + ); + + // Negative Unary value <=0 + suppliers.addAll( + TestCaseSupplier.forUnaryCastingToDouble( + "LogConstantEvaluator", + "value", + v -> null, + Double.NEGATIVE_INFINITY, + 0d, + List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", + "Line -1:-1: java.lang.ArithmeticException: Log of non-positive number" + ) + ) + ); + + // Add null cases before the rest of the error cases, so messages are correct. + suppliers = anyNullIsNull(true, suppliers); + + // Negative cases + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers)); + } + + @Override + protected DataType expectedType(List argTypes) { + return DataTypes.DOUBLE; + } + + @Override + protected List argSpec() { + return List.of(optional(numerics()), required(numerics())); + } + + @Override + protected Expression build(Source source, List args) { + return new Log(source, args.get(0), args.size() > 1 ? 
args.get(1) : null); + } +} From bb62f05c6d633f7898020960f2fa385d47c771cd Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Tue, 6 Feb 2024 18:03:07 +0000 Subject: [PATCH 080/106] Bump versions after 8.12.1 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 16 ++++++++++++++++ .buildkite/pipelines/periodic.yml | 10 ++++++++++ .ci/bwcVersions | 1 + .ci/snapshotBwcVersions | 2 +- .../src/main/java/org/elasticsearch/Version.java | 1 + .../org/elasticsearch/TransportVersions.csv | 1 + .../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 32 insertions(+), 2 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 102542d58bb0b..2bd91b7fe3739 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.19", "8.12.1", "8.13.0"] + BWC_VERSION: ["7.17.19", "8.12.2", "8.13.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 6f1c42654084f..ed00a0655dbd8 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -1857,6 +1857,22 @@ steps: env: BWC_VERSION: 8.12.1 + - label: "{{matrix.image}} / 8.12.2 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.12.2 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 8.12.2 + - label: "{{matrix.image}} / 8.13.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.13.0 timeout_in_minutes: 300 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 6af4955e64a3b..86dc3c216d060 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -1142,6 +1142,16 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 8.12.1 + - label: 8.12.2 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.12.2#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 8.12.2 - label: 8.13.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.13.0#bwcTest timeout_in_minutes: 300 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index af3ade0589a5c..8ac1a60c9530c 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -113,4 +113,5 @@ BWC_VERSION: - "8.11.4" - "8.12.0" - "8.12.1" + - "8.12.2" - "8.13.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index bc9f62a267871..079f3565880e4 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,4 @@ BWC_VERSION: - "7.17.19" - - "8.12.1" + - "8.12.2" - "8.13.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 2ff622605202f..aee6f91b33edc 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -165,6 +165,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_11_4 = new 
Version(8_11_04_99); public static final Version V_8_12_0 = new Version(8_12_00_99); public static final Version V_8_12_1 = new Version(8_12_01_99); + public static final Version V_8_12_2 = new Version(8_12_02_99); public static final Version V_8_13_0 = new Version(8_13_00_99); public static final Version CURRENT = V_8_13_0; diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 1d02879789c22..8efe3b01eefd4 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -110,3 +110,4 @@ 8.11.3,8512001 8.11.4,8512001 8.12.0,8560000 +8.12.1,8560001 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index a351fe2b38c1d..43220565ab871 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -110,3 +110,4 @@ 8.11.3,8500003 8.11.4,8500003 8.12.0,8500008 +8.12.1,8500010 From 8b445bfe677bad11dddbe1ded82b3e56211877e1 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 6 Feb 2024 10:13:24 -0800 Subject: [PATCH 081/106] Harden index mapping parameter check in enrich runner (#105096) There is a case where the mapper parser throws a MapperParsingException instead of not consuming the index:false parameter. We missed that case in the previous fix (see #98038). This PR hardens that check by returning false when hitting a MapperParsingException. Relates #98038 --- docs/changelog/105096.yaml | 5 ++ .../test/enrich/CommonEnrichRestTestCase.java | 47 +++++++++++++++++++ .../xpack/enrich/EnrichPolicyRunner.java | 17 +++++-- 3 files changed, 64 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/105096.yaml diff --git a/docs/changelog/105096.yaml b/docs/changelog/105096.yaml new file mode 100644 index 0000000000000..bfc72a6277bb1 --- /dev/null +++ b/docs/changelog/105096.yaml @@ -0,0 +1,5 @@ +pr: 105096 +summary: Harden index mapping parameter check in enrich runner +area: Ingest Node +type: bug +issues: [] diff --git a/x-pack/plugin/enrich/qa/common/src/main/java/org/elasticsearch/test/enrich/CommonEnrichRestTestCase.java b/x-pack/plugin/enrich/qa/common/src/main/java/org/elasticsearch/test/enrich/CommonEnrichRestTestCase.java index 2878b36b0248c..7107553d8b8b3 100644 --- a/x-pack/plugin/enrich/qa/common/src/main/java/org/elasticsearch/test/enrich/CommonEnrichRestTestCase.java +++ b/x-pack/plugin/enrich/qa/common/src/main/java/org/elasticsearch/test/enrich/CommonEnrichRestTestCase.java @@ -246,6 +246,53 @@ public void testDeleteExistingPipeline() throws Exception { assertOK(client().performRequest(getRequest)); } + public void testEnrichSpecialTypes() throws IOException { + final String mapping = """ + + """; + var createIndexRequest = new Request("PUT", "source-enrich-vector"); + createIndexRequest.setJsonEntity(""" + { + "mappings": { + "properties": { + "keyword_field": { "type": "keyword" }, + "content_embedding": { "type": "sparse_vector" }, + "arbitrary_sparse_vector": { "type": "sparse_vector" } + } + } + } + """); + assertOK(adminClient().performRequest(createIndexRequest)); + var indexRequest = new Request("PUT", "/source-enrich-vector/_doc/1"); + indexRequest.setJsonEntity(""" + { + "arbitrary_sparse_vector": { "arbitrary_value": 7 }, + "content_embedding": { "arbitrary_value": 9}, + "keyword_field": 
1214 + } + """); + assertOK(adminClient().performRequest(indexRequest)); + + var putEnrich = new Request("PUT", "/_enrich/policy/vector_policy"); + putEnrich.setJsonEntity(""" + { + "match": { + "indices": "source-enrich-vector", + "match_field": "keyword_field", + "enrich_fields": ["content_embedding", "arbitrary_sparse_vector"] + } + } + """); + assertOK(adminClient().performRequest(putEnrich)); + try { + var executeEnrich = new Request("PUT", "/_enrich/policy/vector_policy/_execute"); + assertOK(adminClient().performRequest(executeEnrich)); + } finally { + var deleteEnrich = new Request("DELETE", "/_enrich/policy/vector_policy"); + assertOK(adminClient().performRequest(deleteEnrich)); + } + } + public static String generatePolicySource(String index) throws IOException { return generatePolicySource(index, "host", "match"); } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java index 48b9e94cb71e9..5cb9c0cf9c051 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java @@ -47,6 +47,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.reindex.BulkByScrollResponse; @@ -419,12 +420,18 @@ private static MapperService createMapperServiceForValidation(IndicesService ind } } - private static boolean isIndexableField(MapperService mapperService, String field, String type, Map properties) { - properties = new HashMap<>(properties); - properties.put("index", false); + static boolean isIndexableField(MapperService mapperService, String field, String type, Map properties) { + var withIndexParameter = new HashMap<>(properties); + withIndexParameter.put("index", false); Mapper.TypeParser parser = mapperService.getMapperRegistry().getMapperParser(type, IndexVersion.current()); - parser.parse(field, properties, mapperService.parserContext()); - return properties.containsKey("index") == false; + try { + parser.parse(field, withIndexParameter, mapperService.parserContext()); + return withIndexParameter.containsKey("index") == false; + } catch (MapperParsingException e) { + // hitting the mapper parsing exception means this field doesn't accept `index:false`. 
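// (For instance, with the "sparse_vector" fields mapped in the test above, probing the
// type with an extra `index: false` entry makes Mapper.TypeParser#parse throw a
// MapperParsingException along the lines of "unknown parameter [index] on mapper
// [content_embedding] of type [sparse_vector]", rather than simply leaving the
// parameter unconsumed; the assertion below pins down that "unknown parameter [index]"
// substring.)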
+ assert e.getMessage().contains("unknown parameter [index]") : e; + return false; + } } private void prepareAndCreateEnrichIndex(List> mappings) { From f1b4878c72231bb108347a5c31247bbc68002426 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Tue, 6 Feb 2024 13:30:04 -0500 Subject: [PATCH 082/106] [ML] Switch OpenAI and Cohere configuration to use model_id field instead of model (#105195) * Adding model_id for cohere * Preferring model_id * Updating openai request task settings * Removing logging from request * suggestedChanges --------- Co-authored-by: Max Hniebergall --- .../cohere/CohereEmbeddingsRequest.java | 8 +- .../cohere/CohereEmbeddingsRequestEntity.java | 2 +- .../openai/OpenAiEmbeddingsRequest.java | 6 +- .../services/cohere/CohereService.java | 54 +++++++--- .../cohere/CohereServiceSettings.java | 49 ++++++---- .../embeddings/CohereEmbeddingsModel.java | 5 +- .../CohereEmbeddingsServiceSettings.java | 4 +- .../services/openai/OpenAiService.java | 24 ++++- .../embeddings/OpenAiEmbeddingsModel.java | 5 +- .../OpenAiEmbeddingsRequestTaskSettings.java | 25 ++++- .../OpenAiEmbeddingsTaskSettings.java | 45 ++++++--- .../cohere/CohereServiceSettingsTests.java | 92 +++++++++++++++-- .../services/cohere/CohereServiceTests.java | 16 +-- .../CohereEmbeddingsServiceSettingsTests.java | 91 ++++++++++++++++- .../services/openai/OpenAiServiceTests.java | 28 +++--- ...nAiEmbeddingsRequestTaskSettingsTests.java | 43 ++++++-- .../OpenAiEmbeddingsTaskSettingsTests.java | 98 ++++++++++++++++--- 17 files changed, 480 insertions(+), 115 deletions(-) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java index 30427aaa35869..d788a4667c532 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequest.java @@ -37,7 +37,7 @@ public class CohereEmbeddingsRequest implements Request { private final CohereEmbeddingsTaskSettings taskSettings; private final String model; private final CohereEmbeddingType embeddingType; - private final String modelId; + private final String inferenceEntityId; public CohereEmbeddingsRequest(CohereAccount account, List input, CohereEmbeddingsModel embeddingsModel) { Objects.requireNonNull(embeddingsModel); @@ -46,9 +46,9 @@ public CohereEmbeddingsRequest(CohereAccount account, List input, Cohere this.input = Objects.requireNonNull(input); uri = buildUri(this.account.url(), "Cohere", CohereEmbeddingsRequest::buildDefaultUri); taskSettings = embeddingsModel.getTaskSettings(); - model = embeddingsModel.getServiceSettings().getCommonSettings().getModel(); + model = embeddingsModel.getServiceSettings().getCommonSettings().getModelId(); embeddingType = embeddingsModel.getServiceSettings().getEmbeddingType(); - modelId = embeddingsModel.getInferenceEntityId(); + inferenceEntityId = embeddingsModel.getInferenceEntityId(); } @Override @@ -69,7 +69,7 @@ public HttpRequest createHttpRequest() { @Override public String getInferenceEntityId() { - return modelId; + return inferenceEntityId; } @Override diff --git 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java index 9e34af5ed6385..be14c9cb066b4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/cohere/CohereEmbeddingsRequestEntity.java @@ -47,7 +47,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field(TEXTS_FIELD, input); if (model != null) { - builder.field(CohereServiceSettings.MODEL, model); + builder.field(CohereServiceSettings.OLD_MODEL_ID_FIELD, model); } if (taskSettings.getInputType() != null) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java index dbb4c64f95637..b17f953357a57 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java @@ -54,7 +54,11 @@ public HttpRequest createHttpRequest() { ByteArrayEntity byteEntity = new ByteArrayEntity( Strings.toString( - new OpenAiEmbeddingsRequestEntity(truncationResult.input(), model.getTaskSettings().model(), model.getTaskSettings().user()) + new OpenAiEmbeddingsRequestEntity( + truncationResult.input(), + model.getTaskSettings().modelId(), + model.getTaskSettings().user() + ) ).getBytes(StandardCharsets.UTF_8) ); httpPost.setEntity(byteEntity); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index bb24faeaff6da..c4c83841c5a85 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -54,7 +54,7 @@ public String name() { @Override public CohereModel parseRequestConfig( - String modelId, + String inferenceEntityId, TaskType taskType, Map config, Set platformArchitectures @@ -63,12 +63,13 @@ public CohereModel parseRequestConfig( Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); CohereModel model = createModel( - modelId, + inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, serviceSettingsMap, - TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME) + TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), + true ); throwIfNotEmptyMap(config, NAME); @@ -78,23 +79,43 @@ public CohereModel parseRequestConfig( return model; } - private static CohereModel createModel( - String modelId, + private static CohereModel createModelWithoutLoggingDeprecations( + String inferenceEntityId, TaskType taskType, Map serviceSettings, Map taskSettings, @Nullable Map secretSettings, String failureMessage + ) { + return createModel(inferenceEntityId, taskType, serviceSettings, taskSettings, secretSettings, failureMessage, false); 
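// (Why this wrapper hard-codes logDeprecations = false: it backs the
// parsePersistedConfig and parsePersistedConfigWithSecrets paths, which re-read
// configurations already stored in the cluster, so re-logging the model -> model_id
// deprecation on every read would be noise. Only parseRequestConfig, which handles a
// user's create request, passes true.)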
+ } + + private static CohereModel createModel( + String inferenceEntityId, + TaskType taskType, + Map serviceSettings, + Map taskSettings, + @Nullable Map secretSettings, + String failureMessage, + boolean logDeprecations ) { return switch (taskType) { - case TEXT_EMBEDDING -> new CohereEmbeddingsModel(modelId, taskType, NAME, serviceSettings, taskSettings, secretSettings); + case TEXT_EMBEDDING -> new CohereEmbeddingsModel( + inferenceEntityId, + taskType, + NAME, + serviceSettings, + taskSettings, + secretSettings, + logDeprecations + ); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; } @Override public CohereModel parsePersistedConfigWithSecrets( - String modelId, + String inferenceEntityId, TaskType taskType, Map config, Map secrets @@ -103,22 +124,29 @@ public CohereModel parsePersistedConfigWithSecrets( Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); - return createModel( - modelId, + return createModelWithoutLoggingDeprecations( + inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, secretSettingsMap, - parsePersistedConfigErrorMsg(modelId, NAME) + parsePersistedConfigErrorMsg(inferenceEntityId, NAME) ); } @Override - public CohereModel parsePersistedConfig(String modelId, TaskType taskType, Map config) { + public CohereModel parsePersistedConfig(String inferenceEntityId, TaskType taskType, Map config) { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); - return createModel(modelId, taskType, serviceSettingsMap, taskSettingsMap, null, parsePersistedConfigErrorMsg(modelId, NAME)); + return createModelWithoutLoggingDeprecations( + inferenceEntityId, + taskType, + serviceSettingsMap, + taskSettingsMap, + null, + parsePersistedConfigErrorMsg(inferenceEntityId, NAME) + ); } @Override @@ -180,7 +208,7 @@ private CohereEmbeddingsModel updateModelWithEmbeddingDetails(CohereEmbeddingsMo SimilarityMeasure.DOT_PRODUCT, embeddingSize, model.getServiceSettings().getCommonSettings().getMaxInputTokens(), - model.getServiceSettings().getCommonSettings().getModel() + model.getServiceSettings().getCommonSettings().getModelId() ), model.getServiceSettings().getEmbeddingType() ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java index 7964741d90343..0d6f3b432b43c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettings.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference.services.cohere; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ValidationException; @@ -34,10 +36,13 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeAsType; public class CohereServiceSettings implements ServiceSettings { + + private static final Logger logger = LogManager.getLogger(CohereServiceSettings.class); public static final 
String NAME = "cohere_service_settings"; - public static final String MODEL = "model"; + public static final String OLD_MODEL_ID_FIELD = "model"; + public static final String MODEL_ID = "model_id"; - public static CohereServiceSettings fromMap(Map map) { + public static CohereServiceSettings fromMap(Map map, boolean logDeprecations) { ValidationException validationException = new ValidationException(); String url = extractOptionalString(map, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); @@ -46,33 +51,43 @@ public static CohereServiceSettings fromMap(Map map) { Integer dims = removeAsType(map, DIMENSIONS, Integer.class); Integer maxInputTokens = removeAsType(map, MAX_INPUT_TOKENS, Integer.class); URI uri = convertToUri(url, URL, ModelConfigurations.SERVICE_SETTINGS, validationException); - String model = extractOptionalString(map, MODEL, ModelConfigurations.SERVICE_SETTINGS, validationException); + String oldModelId = extractOptionalString(map, OLD_MODEL_ID_FIELD, ModelConfigurations.SERVICE_SETTINGS, validationException); + + String modelId = extractOptionalString(map, MODEL_ID, ModelConfigurations.SERVICE_SETTINGS, validationException); + + if (logDeprecations && oldModelId != null) { + logger.info("The cohere [service_settings.model] field is deprecated. Please use [service_settings.model_id] instead."); + } if (validationException.validationErrors().isEmpty() == false) { throw validationException; } - return new CohereServiceSettings(uri, similarity, dims, maxInputTokens, model); + return new CohereServiceSettings(uri, similarity, dims, maxInputTokens, getModelId(oldModelId, modelId)); + } + + private static String getModelId(@Nullable String model, @Nullable String modelId) { + return modelId != null ? modelId : model; } private final URI uri; private final SimilarityMeasure similarity; private final Integer dimensions; private final Integer maxInputTokens; - private final String model; + private final String modelId; public CohereServiceSettings( @Nullable URI uri, @Nullable SimilarityMeasure similarity, @Nullable Integer dimensions, @Nullable Integer maxInputTokens, - @Nullable String model + @Nullable String modelId ) { this.uri = uri; this.similarity = similarity; this.dimensions = dimensions; this.maxInputTokens = maxInputTokens; - this.model = model; + this.modelId = modelId; } public CohereServiceSettings( @@ -80,9 +95,9 @@ public CohereServiceSettings( @Nullable SimilarityMeasure similarity, @Nullable Integer dimensions, @Nullable Integer maxInputTokens, - @Nullable String model + @Nullable String modelId ) { - this(createOptionalUri(url), similarity, dimensions, maxInputTokens, model); + this(createOptionalUri(url), similarity, dimensions, maxInputTokens, modelId); } public CohereServiceSettings(StreamInput in) throws IOException { @@ -90,7 +105,7 @@ public CohereServiceSettings(StreamInput in) throws IOException { similarity = in.readOptionalEnum(SimilarityMeasure.class); dimensions = in.readOptionalVInt(); maxInputTokens = in.readOptionalVInt(); - model = in.readOptionalString(); + modelId = in.readOptionalString(); } public URI getUri() { @@ -109,8 +124,8 @@ public Integer getMaxInputTokens() { return maxInputTokens; } - public String getModel() { - return model; + public String getModelId() { + return modelId; } @Override @@ -141,8 +156,8 @@ public XContentBuilder toXContentFragment(XContentBuilder builder) throws IOExce if (maxInputTokens != null) { builder.field(MAX_INPUT_TOKENS, maxInputTokens); } - if (model != null) { - builder.field(MODEL, model); 
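// (Note the read/write asymmetry introduced here: fromMap above still accepts the
// legacy "model" key for backwards compatibility, while serialization below only
// emits "model_id", so stored Cohere settings migrate to the new field name the next
// time they are written out.)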
+ if (modelId != null) { + builder.field(MODEL_ID, modelId); } return builder; @@ -160,7 +175,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalEnum(similarity); out.writeOptionalVInt(dimensions); out.writeOptionalVInt(maxInputTokens); - out.writeOptionalString(model); + out.writeOptionalString(modelId); } @Override @@ -172,11 +187,11 @@ public boolean equals(Object o) { && Objects.equals(similarity, that.similarity) && Objects.equals(dimensions, that.dimensions) && Objects.equals(maxInputTokens, that.maxInputTokens) - && Objects.equals(model, that.model); + && Objects.equals(modelId, that.modelId); } @Override public int hashCode() { - return Objects.hash(uri, similarity, dimensions, maxInputTokens, model); + return Objects.hash(uri, similarity, dimensions, maxInputTokens, modelId); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java index a3afdc306b217..ddd3b71ef4538 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsModel.java @@ -31,13 +31,14 @@ public CohereEmbeddingsModel( String service, Map serviceSettings, Map taskSettings, - @Nullable Map secrets + @Nullable Map secrets, + boolean logDeprecations ) { this( modelId, taskType, service, - CohereEmbeddingsServiceSettings.fromMap(serviceSettings), + CohereEmbeddingsServiceSettings.fromMap(serviceSettings, logDeprecations), CohereEmbeddingsTaskSettings.fromMap(taskSettings), DefaultSecretSettings.fromMap(secrets) ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java index 916e7fadcc8fb..a92ca02ff7281 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettings.java @@ -30,9 +30,9 @@ public class CohereEmbeddingsServiceSettings implements ServiceSettings { static final String EMBEDDING_TYPE = "embedding_type"; - public static CohereEmbeddingsServiceSettings fromMap(Map map) { + public static CohereEmbeddingsServiceSettings fromMap(Map map, boolean logDeprecations) { ValidationException validationException = new ValidationException(); - var commonServiceSettings = CohereServiceSettings.fromMap(map); + var commonServiceSettings = CohereServiceSettings.fromMap(map, logDeprecations); CohereEmbeddingType embeddingTypes = extractOptionalEnum( map, EMBEDDING_TYPE, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index d795c75b6c178..8071513b817c9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -67,7 +67,8 @@ public OpenAiModel parseRequestConfig( serviceSettingsMap, taskSettingsMap, serviceSettingsMap, - TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME) + TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), + true ); throwIfNotEmptyMap(config, NAME); @@ -77,13 +78,25 @@ public OpenAiModel parseRequestConfig( return model; } - private static OpenAiModel createModel( + private static OpenAiModel createModelWithoutLoggingDeprecations( String inferenceEntityId, TaskType taskType, Map serviceSettings, Map taskSettings, @Nullable Map secretSettings, String failureMessage + ) { + return createModel(inferenceEntityId, taskType, serviceSettings, taskSettings, secretSettings, failureMessage, false); + } + + private static OpenAiModel createModel( + String inferenceEntityId, + TaskType taskType, + Map serviceSettings, + Map taskSettings, + @Nullable Map secretSettings, + String failureMessage, + boolean logDeprecations ) { return switch (taskType) { case TEXT_EMBEDDING -> new OpenAiEmbeddingsModel( @@ -92,7 +105,8 @@ private static OpenAiModel createModel( NAME, serviceSettings, taskSettings, - secretSettings + secretSettings, + logDeprecations ); default -> throw new ElasticsearchStatusException(failureMessage, RestStatus.BAD_REQUEST); }; @@ -109,7 +123,7 @@ public OpenAiModel parsePersistedConfigWithSecrets( Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); Map secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS); - return createModel( + return createModelWithoutLoggingDeprecations( inferenceEntityId, taskType, serviceSettingsMap, @@ -124,7 +138,7 @@ public OpenAiModel parsePersistedConfig(String inferenceEntityId, TaskType taskT Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); - return createModel( + return createModelWithoutLoggingDeprecations( inferenceEntityId, taskType, serviceSettingsMap, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java index 74d97099bbb76..ba4f1ac479824 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModel.java @@ -36,14 +36,15 @@ public OpenAiEmbeddingsModel( String service, Map serviceSettings, Map taskSettings, - @Nullable Map secrets + @Nullable Map secrets, + boolean logDeprecations ) { this( inferenceEntityId, taskType, service, OpenAiServiceSettings.fromMap(serviceSettings), - OpenAiEmbeddingsTaskSettings.fromMap(taskSettings), + OpenAiEmbeddingsTaskSettings.fromMap(taskSettings, logDeprecations), DefaultSecretSettings.fromMap(secrets) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java index 7df57516ad632..221bd61214455 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettings.java @@ -7,23 +7,29 @@ package org.elasticsearch.xpack.inference.services.openai.embeddings; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.common.ValidationException; +import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.ModelConfigurations; import java.util.Map; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; -import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings.MODEL; +import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings.MODEL_ID; +import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings.USER; /** * This class handles extracting OpenAI task settings from a request. The difference between this class and * {@link OpenAiEmbeddingsTaskSettings} is that this class considers all fields as optional. It will not throw an error if a field * is missing. This allows overriding persistent task settings. - * @param model the name of the model to use with this request + * @param modelId the name of the model to use with this request * @param user a unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse */ -public record OpenAiEmbeddingsRequestTaskSettings(String model, String user) { +public record OpenAiEmbeddingsRequestTaskSettings(@Nullable String modelId, @Nullable String user) { + private static final Logger logger = LogManager.getLogger(OpenAiEmbeddingsRequestTaskSettings.class); + public static final OpenAiEmbeddingsRequestTaskSettings EMPTY_SETTINGS = new OpenAiEmbeddingsRequestTaskSettings(null, null); /** @@ -39,13 +45,22 @@ public static OpenAiEmbeddingsRequestTaskSettings fromMap(Map ma ValidationException validationException = new ValidationException(); - String model = extractOptionalString(map, MODEL, ModelConfigurations.TASK_SETTINGS, validationException); + // I'm intentionally not logging if this is set because it would log on every request + String model = extractOptionalString(map, OLD_MODEL_ID_FIELD, ModelConfigurations.TASK_SETTINGS, validationException); + + String modelId = extractOptionalString(map, MODEL_ID, ModelConfigurations.TASK_SETTINGS, validationException); String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + var modelIdToUse = getModelId(model, modelId); + if (validationException.validationErrors().isEmpty() == false) { throw validationException; } - return new OpenAiEmbeddingsRequestTaskSettings(model, user); + return new OpenAiEmbeddingsRequestTaskSettings(modelIdToUse, user); + } + + private static String getModelId(@Nullable String model, @Nullable String modelId) { + return modelId != null ? 
modelId : model; } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java index c6f3179a4f088..d0042ff57efe2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettings.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference.services.openai.embeddings; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.common.ValidationException; @@ -16,38 +18,59 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; import java.util.Map; import java.util.Objects; import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractOptionalString; -import static org.elasticsearch.xpack.inference.services.ServiceUtils.extractRequiredString; /** * Defines the task settings for the openai service. * - * @param model the id of the model to use in the requests to openai + * @param modelId the id of the model to use in the requests to openai * @param user an optional unique identifier representing the end-user, which can help OpenAI to monitor and detect abuse * see the openai docs for more details */ -public record OpenAiEmbeddingsTaskSettings(String model, @Nullable String user) implements TaskSettings { +public record OpenAiEmbeddingsTaskSettings(String modelId, @Nullable String user) implements TaskSettings { public static final String NAME = "openai_embeddings_task_settings"; - public static final String MODEL = "model"; + public static final String OLD_MODEL_ID_FIELD = "model"; + public static final String MODEL_ID = "model_id"; public static final String USER = "user"; + private static final String MODEL_DEPRECATION_MESSAGE = + "The openai [task_settings.model] field is deprecated. 
Please use [task_settings.model_id] instead."; + private static final Logger logger = LogManager.getLogger(OpenAiEmbeddingsTaskSettings.class); - public static OpenAiEmbeddingsTaskSettings fromMap(Map map) { + public static OpenAiEmbeddingsTaskSettings fromMap(Map map, boolean logDeprecations) { ValidationException validationException = new ValidationException(); - String model = extractRequiredString(map, MODEL, ModelConfigurations.TASK_SETTINGS, validationException); + String oldModelId = extractOptionalString(map, OLD_MODEL_ID_FIELD, ModelConfigurations.TASK_SETTINGS, validationException); + if (logDeprecations && oldModelId != null) { + logger.info(MODEL_DEPRECATION_MESSAGE); + } + + String modelId = extractOptionalString(map, MODEL_ID, ModelConfigurations.TASK_SETTINGS, validationException); String user = extractOptionalString(map, USER, ModelConfigurations.TASK_SETTINGS, validationException); + var modelIdToUse = getModelId(oldModelId, modelId, validationException); + if (validationException.validationErrors().isEmpty() == false) { throw validationException; } - return new OpenAiEmbeddingsTaskSettings(model, user); + return new OpenAiEmbeddingsTaskSettings(modelIdToUse, user); + } + + private static String getModelId(@Nullable String oldModelId, @Nullable String modelId, ValidationException validationException) { + var modelIdToUse = modelId != null ? modelId : oldModelId; + + if (modelIdToUse == null) { + validationException.addValidationError(ServiceUtils.missingSettingErrorMsg(MODEL_ID, ModelConfigurations.TASK_SETTINGS)); + } + + return modelIdToUse; } /** @@ -61,14 +84,14 @@ public static OpenAiEmbeddingsTaskSettings of( OpenAiEmbeddingsTaskSettings originalSettings, OpenAiEmbeddingsRequestTaskSettings requestSettings ) { - var modelToUse = requestSettings.model() == null ? originalSettings.model : requestSettings.model(); + var modelToUse = requestSettings.modelId() == null ? originalSettings.modelId : requestSettings.modelId(); var userToUse = requestSettings.user() == null ? 
originalSettings.user : requestSettings.user(); return new OpenAiEmbeddingsTaskSettings(modelToUse, userToUse); } public OpenAiEmbeddingsTaskSettings { - Objects.requireNonNull(model); + Objects.requireNonNull(modelId); } public OpenAiEmbeddingsTaskSettings(StreamInput in) throws IOException { @@ -78,7 +101,7 @@ public OpenAiEmbeddingsTaskSettings(StreamInput in) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(MODEL, model); + builder.field(MODEL_ID, modelId); if (user != null) { builder.field(USER, user); } @@ -98,7 +121,7 @@ public TransportVersion getMinimalSupportedVersion() { @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(model); + out.writeString(modelId); out.writeOptionalString(user); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java index 6f47d5c74d81c..321567dfa32e8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceSettingsTests.java @@ -12,9 +12,13 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.inference.common.SimilarityMeasure; import org.elasticsearch.xpack.inference.services.ServiceFields; import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -69,10 +73,73 @@ public void testFromMap() { dims, ServiceFields.MAX_INPUT_TOKENS, maxInputTokens, - CohereServiceSettings.MODEL, + CohereServiceSettings.OLD_MODEL_ID_FIELD, model ) - ) + ), + true + ); + + MatcherAssert.assertThat( + serviceSettings, + is(new CohereServiceSettings(ServiceUtils.createUri(url), SimilarityMeasure.DOT_PRODUCT, dims, maxInputTokens, model)) + ); + } + + public void testFromMap_WhenUsingModelId() { + var url = "https://www.abc.com"; + var similarity = SimilarityMeasure.DOT_PRODUCT.toString(); + var dims = 1536; + var maxInputTokens = 512; + var model = "model"; + var serviceSettings = CohereServiceSettings.fromMap( + new HashMap<>( + Map.of( + ServiceFields.URL, + url, + ServiceFields.SIMILARITY, + similarity, + ServiceFields.DIMENSIONS, + dims, + ServiceFields.MAX_INPUT_TOKENS, + maxInputTokens, + CohereServiceSettings.MODEL_ID, + model + ) + ), + false + ); + + MatcherAssert.assertThat( + serviceSettings, + is(new CohereServiceSettings(ServiceUtils.createUri(url), SimilarityMeasure.DOT_PRODUCT, dims, maxInputTokens, model)) + ); + } + + public void testFromMap_PrefersModelId_OverModel() { + var url = "https://www.abc.com"; + var similarity = SimilarityMeasure.DOT_PRODUCT.toString(); + var dims = 1536; + var maxInputTokens = 512; + var model = "model"; + var serviceSettings = CohereServiceSettings.fromMap( + new HashMap<>( + Map.of( + ServiceFields.URL, + url, + ServiceFields.SIMILARITY, + similarity, + ServiceFields.DIMENSIONS, + dims, + 
ServiceFields.MAX_INPUT_TOKENS, + maxInputTokens, + CohereServiceSettings.OLD_MODEL_ID_FIELD, + "old_model", + CohereServiceSettings.MODEL_ID, + model + ) + ), + false ); MatcherAssert.assertThat( @@ -82,14 +149,14 @@ public void testFromMap() { } public void testFromMap_MissingUrl_DoesNotThrowException() { - var serviceSettings = CohereServiceSettings.fromMap(new HashMap<>(Map.of())); + var serviceSettings = CohereServiceSettings.fromMap(new HashMap<>(Map.of()), false); assertNull(serviceSettings.getUri()); } public void testFromMap_EmptyUrl_ThrowsError() { var thrownException = expectThrows( ValidationException.class, - () -> CohereServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, ""))) + () -> CohereServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, "")), false) ); MatcherAssert.assertThat( @@ -107,7 +174,7 @@ public void testFromMap_InvalidUrl_ThrowsError() { var url = "https://www.abc^.com"; var thrownException = expectThrows( ValidationException.class, - () -> CohereServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url))) + () -> CohereServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.URL, url)), false) ); MatcherAssert.assertThat( @@ -120,7 +187,7 @@ public void testFromMap_InvalidSimilarity_ThrowsError() { var similarity = "by_size"; var thrownException = expectThrows( ValidationException.class, - () -> CohereServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.SIMILARITY, similarity))) + () -> CohereServiceSettings.fromMap(new HashMap<>(Map.of(ServiceFields.SIMILARITY, similarity)), false) ); MatcherAssert.assertThat( @@ -129,6 +196,17 @@ public void testFromMap_InvalidSimilarity_ThrowsError() { ); } + public void testXContent_WritesModelId() throws IOException { + var entity = new CohereServiceSettings((String) null, null, null, null, "modelId"); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, CoreMatchers.is(""" + {"model_id":"modelId"}""")); + } + @Override protected Writeable.Reader instanceReader() { return CohereServiceSettings::new; @@ -152,7 +230,7 @@ public static Map getServiceSettingsMap(@Nullable String url, @N } if (model != null) { - map.put(CohereServiceSettings.MODEL, model); + map.put(CohereServiceSettings.OLD_MODEL_ID_FIELD, model); } return map; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 7daad207f9068..32c4acd109685 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -111,7 +111,7 @@ public void testParseRequestConfig_CreatesACohereEmbeddingsModel() throws IOExce var embeddingsModel = (CohereEmbeddingsModel) model; MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); 
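// (This test appears to build its service settings through the getServiceSettingsMap
// helper shown above, which still writes the legacy "model" key, so the getModelId()
// assertion above also covers the fallback from the missing "model_id" field to the
// deprecated "model" one.)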
MatcherAssert.assertThat( embeddingsModel.getTaskSettings(), @@ -306,7 +306,7 @@ public void testParsePersistedConfigWithSecrets_CreatesACohereEmbeddingsModel() var embeddingsModel = (CohereEmbeddingsModel) model; MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -396,7 +396,7 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists var embeddingsModel = (CohereEmbeddingsModel) model; MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.INT8)); MatcherAssert.assertThat( embeddingsModel.getTaskSettings(), @@ -463,7 +463,7 @@ public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInSe var embeddingsModel = (CohereEmbeddingsModel) model; MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -524,7 +524,7 @@ public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInTa var embeddingsModel = (CohereEmbeddingsModel) model; MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.SEARCH, null))); MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -548,7 +548,7 @@ public void testParsePersistedConfig_CreatesACohereEmbeddingsModel() throws IOEx var embeddingsModel = (CohereEmbeddingsModel) model; MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, CohereTruncation.NONE))); assertNull(embeddingsModel.getSecretSettings()); } @@ -596,7 +596,7 @@ public void 
testParsePersistedConfig_CreatesACohereEmbeddingsModelWithoutUrl() t var embeddingsModel = (CohereEmbeddingsModel) model; assertNull(embeddingsModel.getServiceSettings().getCommonSettings().getUri()); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(null, null))); assertNull(embeddingsModel.getSecretSettings()); @@ -671,7 +671,7 @@ public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInTaskSettings( var embeddingsModel = (CohereEmbeddingsModel) model; MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModel(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(new CohereEmbeddingsTaskSettings(InputType.INGEST, null))); assertNull(embeddingsModel.getSecretSettings()); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java index e0b29ce9c34da..39aba9c281a0c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/embeddings/CohereEmbeddingsServiceSettingsTests.java @@ -57,13 +57,90 @@ public void testFromMap() { dims, ServiceFields.MAX_INPUT_TOKENS, maxInputTokens, - CohereServiceSettings.MODEL, + CohereServiceSettings.OLD_MODEL_ID_FIELD, model, CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, CohereEmbeddingType.INT8.toString() ) + ), + false + ); + + MatcherAssert.assertThat( + serviceSettings, + is( + new CohereEmbeddingsServiceSettings( + new CohereServiceSettings(ServiceUtils.createUri(url), SimilarityMeasure.DOT_PRODUCT, dims, maxInputTokens, model), + CohereEmbeddingType.INT8 + ) ) ); + } + + public void testFromMap_WithModelId() { + var url = "https://www.abc.com"; + var similarity = SimilarityMeasure.DOT_PRODUCT.toString(); + var dims = 1536; + var maxInputTokens = 512; + var model = "model"; + var serviceSettings = CohereEmbeddingsServiceSettings.fromMap( + new HashMap<>( + Map.of( + ServiceFields.URL, + url, + ServiceFields.SIMILARITY, + similarity, + ServiceFields.DIMENSIONS, + dims, + ServiceFields.MAX_INPUT_TOKENS, + maxInputTokens, + CohereServiceSettings.OLD_MODEL_ID_FIELD, + model, + CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, + CohereEmbeddingType.INT8.toString() + ) + ), + false + ); + + MatcherAssert.assertThat( + serviceSettings, + is( + new CohereEmbeddingsServiceSettings( + new CohereServiceSettings(ServiceUtils.createUri(url), SimilarityMeasure.DOT_PRODUCT, dims, maxInputTokens, model), + CohereEmbeddingType.INT8 + ) + ) + ); + } + + public void testFromMap_PrefersModelId_OverModel() { + var url = "https://www.abc.com"; + var similarity = 
SimilarityMeasure.DOT_PRODUCT.toString(); + var dims = 1536; + var maxInputTokens = 512; + var model = "model"; + var serviceSettings = CohereEmbeddingsServiceSettings.fromMap( + new HashMap<>( + Map.of( + ServiceFields.URL, + url, + ServiceFields.SIMILARITY, + similarity, + ServiceFields.DIMENSIONS, + dims, + ServiceFields.MAX_INPUT_TOKENS, + maxInputTokens, + CohereServiceSettings.OLD_MODEL_ID_FIELD, + "old_model", + CohereServiceSettings.MODEL_ID, + model, + CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, + CohereEmbeddingType.INT8.toString() + ) + ), + false + ); MatcherAssert.assertThat( serviceSettings, @@ -77,14 +154,14 @@ public void testFromMap() { } public void testFromMap_MissingEmbeddingType_DoesNotThrowException() { - var serviceSettings = CohereEmbeddingsServiceSettings.fromMap(new HashMap<>(Map.of())); + var serviceSettings = CohereEmbeddingsServiceSettings.fromMap(new HashMap<>(Map.of()), false); assertNull(serviceSettings.getEmbeddingType()); } public void testFromMap_EmptyEmbeddingType_ThrowsError() { var thrownException = expectThrows( ValidationException.class, - () -> CohereEmbeddingsServiceSettings.fromMap(new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, ""))) + () -> CohereEmbeddingsServiceSettings.fromMap(new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, "")), true) ); MatcherAssert.assertThat( @@ -101,7 +178,10 @@ public void testFromMap_EmptyEmbeddingType_ThrowsError() { public void testFromMap_InvalidEmbeddingType_ThrowsError() { var thrownException = expectThrows( ValidationException.class, - () -> CohereEmbeddingsServiceSettings.fromMap(new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, "abc"))) + () -> CohereEmbeddingsServiceSettings.fromMap( + new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, "abc")), + false + ) ); MatcherAssert.assertThat( @@ -118,7 +198,8 @@ public void testFromMap_ReturnsFailure_WhenEmbeddingTypesAreNotValid() { var exception = expectThrows( ElasticsearchStatusException.class, () -> CohereEmbeddingsServiceSettings.fromMap( - new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, List.of("abc"))) + new HashMap<>(Map.of(CohereEmbeddingsServiceSettings.EMBEDDING_TYPE, List.of("abc"))), + false ) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 2659715771686..1e40b86bb7597 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -108,7 +108,7 @@ public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModel() throws IOExc var embeddingsModel = (OpenAiEmbeddingsModel) model; assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); - assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); assertThat(embeddingsModel.getTaskSettings().user(), is("user")); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -256,7 +256,7 @@ public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUrlO var embeddingsModel = (OpenAiEmbeddingsModel) model; 
assertNull(embeddingsModel.getServiceSettings().uri()); assertNull(embeddingsModel.getServiceSettings().organizationId()); - assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); assertNull(embeddingsModel.getTaskSettings().user()); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -287,7 +287,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModel() var embeddingsModel = (OpenAiEmbeddingsModel) model; assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); - assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); assertThat(embeddingsModel.getTaskSettings().user(), is("user")); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -348,7 +348,7 @@ public void testParsePersistedConfigWithSecrets_CreatesAnOpenAiEmbeddingsModelWi var embeddingsModel = (OpenAiEmbeddingsModel) model; assertNull(embeddingsModel.getServiceSettings().uri()); assertNull(embeddingsModel.getServiceSettings().organizationId()); - assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); assertNull(embeddingsModel.getTaskSettings().user()); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -380,7 +380,7 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists var embeddingsModel = (OpenAiEmbeddingsModel) model; assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); - assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); assertThat(embeddingsModel.getTaskSettings().user(), is("user")); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -414,7 +414,7 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists var embeddingsModel = (OpenAiEmbeddingsModel) model; assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); - assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); assertThat(embeddingsModel.getTaskSettings().user(), is("user")); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -446,7 +446,7 @@ public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInSe var embeddingsModel = (OpenAiEmbeddingsModel) model; assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); - assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); assertThat(embeddingsModel.getTaskSettings().user(), is("user")); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -480,7 +480,7 @@ public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInSe var embeddingsModel = (OpenAiEmbeddingsModel) model; 
assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); - assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); assertThat(embeddingsModel.getTaskSettings().user(), is("user")); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -514,7 +514,7 @@ public void testParsePersistedConfigWithSecrets_NotThrowWhenAnExtraKeyExistsInTa var embeddingsModel = (OpenAiEmbeddingsModel) model; assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); - assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); assertThat(embeddingsModel.getTaskSettings().user(), is("user")); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } @@ -536,7 +536,7 @@ public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModel() throws IOE var embeddingsModel = (OpenAiEmbeddingsModel) model; assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); - assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); assertThat(embeddingsModel.getTaskSettings().user(), is("user")); assertNull(embeddingsModel.getSecretSettings()); } @@ -579,7 +579,7 @@ public void testParsePersistedConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUr var embeddingsModel = (OpenAiEmbeddingsModel) model; assertNull(embeddingsModel.getServiceSettings().uri()); assertNull(embeddingsModel.getServiceSettings().organizationId()); - assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); assertNull(embeddingsModel.getTaskSettings().user()); assertNull(embeddingsModel.getSecretSettings()); } @@ -602,7 +602,7 @@ public void testParsePersistedConfig_DoesNotThrowWhenAnExtraKeyExistsInConfig() var embeddingsModel = (OpenAiEmbeddingsModel) model; assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); - assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); assertThat(embeddingsModel.getTaskSettings().user(), is("user")); assertNull(embeddingsModel.getSecretSettings()); } @@ -627,7 +627,7 @@ public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInServiceSettin var embeddingsModel = (OpenAiEmbeddingsModel) model; assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); - assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); assertThat(embeddingsModel.getTaskSettings().user(), is("user")); assertNull(embeddingsModel.getSecretSettings()); } @@ -652,7 +652,7 @@ public void testParsePersistedConfig_NotThrowWhenAnExtraKeyExistsInTaskSettings( var embeddingsModel = (OpenAiEmbeddingsModel) model; assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); 
assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); - assertThat(embeddingsModel.getTaskSettings().model(), is("model")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); assertThat(embeddingsModel.getTaskSettings().user(), is("user")); assertNull(embeddingsModel.getSecretSettings()); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java index b76e9f9a6d5c6..7f62e0b7efe3b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsRequestTaskSettingsTests.java @@ -19,36 +19,67 @@ public class OpenAiEmbeddingsRequestTaskSettingsTests extends ESTestCase { public void testFromMap_ReturnsEmptySettings_WhenTheMapIsEmpty() { var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of())); - assertNull(settings.model()); + assertNull(settings.modelId()); assertNull(settings.user()); } public void testFromMap_ReturnsEmptySettings_WhenTheMapDoesNotContainTheFields() { var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of("key", "model"))); - assertNull(settings.model()); + assertNull(settings.modelId()); assertNull(settings.user()); } public void testFromMap_ReturnsEmptyModel_WhenTheMapDoesNotContainThatField() { var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "user"))); - assertNull(settings.model()); + assertNull(settings.modelId()); assertThat(settings.user(), is("user")); } public void testFromMap_ReturnsEmptyUser_WhenTheDoesMapNotContainThatField() { - var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.MODEL, "model"))); + var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap( + new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, "model")) + ); assertNull(settings.user()); - assertThat(settings.model(), is("model")); + assertThat(settings.modelId(), is("model")); + } + + public void testFromMap_PrefersModelId_OverModel() { + var settings = OpenAiEmbeddingsRequestTaskSettings.fromMap( + new HashMap<>( + Map.of(OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, "model", OpenAiEmbeddingsTaskSettings.MODEL_ID, "model_id") + ) + ); + + assertNull(settings.user()); + assertThat(settings.modelId(), is("model_id")); } public static Map getRequestTaskSettingsMap(@Nullable String model, @Nullable String user) { var map = new HashMap(); if (model != null) { - map.put(OpenAiEmbeddingsTaskSettings.MODEL, model); + map.put(OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, model); + } + + if (user != null) { + map.put(OpenAiEmbeddingsTaskSettings.USER, user); + } + + return map; + } + + public static Map getRequestTaskSettingsMap(@Nullable String model, @Nullable String modelId, @Nullable String user) { + var map = new HashMap(); + + if (model != null) { + map.put(OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, model); + } + + if (modelId != null) { + map.put(OpenAiEmbeddingsTaskSettings.MODEL_ID, modelId); + } if (user != null) { diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java index 103fab071098e..f1488fa9bbe4c 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsTaskSettingsTests.java @@ -12,6 +12,10 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentType; +import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import java.io.IOException; @@ -37,7 +41,7 @@ public static OpenAiEmbeddingsTaskSettings createRandom() { public void testFromMap_MissingModel_ThrowException() { var thrownException = expectThrows( ValidationException.class, - () -> OpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "user"))) + () -> OpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.USER, "user")), true) ); MatcherAssert.assertThat( @@ -45,7 +49,7 @@ public void testFromMap_MissingModel_ThrowException() { is( Strings.format( "Validation Failed: 1: [task_settings] does not contain the required setting [%s];", - OpenAiEmbeddingsTaskSettings.MODEL + OpenAiEmbeddingsTaskSettings.MODEL_ID ) ) ); @@ -53,23 +57,57 @@ public void testFromMap_MissingModel_ThrowException() { public void testFromMap_CreatesWithModelAndUser() { var taskSettings = OpenAiEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.MODEL, "model", OpenAiEmbeddingsTaskSettings.USER, "user")) + new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, "model", OpenAiEmbeddingsTaskSettings.USER, "user")), + false ); - MatcherAssert.assertThat(taskSettings.model(), is("model")); + MatcherAssert.assertThat(taskSettings.modelId(), is("model")); + MatcherAssert.assertThat(taskSettings.user(), is("user")); + } + + public void testFromMap_CreatesWithModelId() { + var taskSettings = OpenAiEmbeddingsTaskSettings.fromMap( + new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.MODEL_ID, "model", OpenAiEmbeddingsTaskSettings.USER, "user")), + false + ); + + MatcherAssert.assertThat(taskSettings.modelId(), is("model")); + MatcherAssert.assertThat(taskSettings.user(), is("user")); + } + + public void testFromMap_PrefersModelId_OverModel() { + var taskSettings = OpenAiEmbeddingsTaskSettings.fromMap( + new HashMap<>( + Map.of( + OpenAiEmbeddingsTaskSettings.MODEL_ID, + "model", + OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, + "old_model", + OpenAiEmbeddingsTaskSettings.USER, + "user" + ) + ), + false + ); + + MatcherAssert.assertThat(taskSettings.modelId(), is("model")); MatcherAssert.assertThat(taskSettings.user(), is("user")); } public void testFromMap_MissingUser_DoesNotThrowException() { - var taskSettings = OpenAiEmbeddingsTaskSettings.fromMap(new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.MODEL, "model"))); + var taskSettings = OpenAiEmbeddingsTaskSettings.fromMap( + new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, "model")), + false + ); - 
MatcherAssert.assertThat(taskSettings.model(), is("model")); + MatcherAssert.assertThat(taskSettings.modelId(), is("model")); assertNull(taskSettings.user()); } public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { var taskSettings = OpenAiEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.MODEL, "model", OpenAiEmbeddingsTaskSettings.USER, "user")) + new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, "model", OpenAiEmbeddingsTaskSettings.USER, "user")), + false ); var overriddenTaskSettings = OpenAiEmbeddingsTaskSettings.of(taskSettings, OpenAiEmbeddingsRequestTaskSettings.EMPTY_SETTINGS); @@ -78,11 +116,35 @@ public void testOverrideWith_KeepsOriginalValuesWithOverridesAreNull() { public void testOverrideWith_UsesOverriddenSettings() { var taskSettings = OpenAiEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.MODEL, "model", OpenAiEmbeddingsTaskSettings.USER, "user")) + new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, "model", OpenAiEmbeddingsTaskSettings.USER, "user")), + false ); var requestTaskSettings = OpenAiEmbeddingsRequestTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.MODEL, "model2", OpenAiEmbeddingsTaskSettings.USER, "user2")) + new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, "model2", OpenAiEmbeddingsTaskSettings.USER, "user2")) + ); + + var overriddenTaskSettings = OpenAiEmbeddingsTaskSettings.of(taskSettings, requestTaskSettings); + MatcherAssert.assertThat(overriddenTaskSettings, is(new OpenAiEmbeddingsTaskSettings("model2", "user2"))); + } + + public void testOverrideWith_UsesOverriddenSettings_UsesModel2_FromModelIdField() { + var taskSettings = OpenAiEmbeddingsTaskSettings.fromMap( + new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, "model", OpenAiEmbeddingsTaskSettings.USER, "user")), + false + ); + + var requestTaskSettings = OpenAiEmbeddingsRequestTaskSettings.fromMap( + new HashMap<>( + Map.of( + OpenAiEmbeddingsTaskSettings.MODEL_ID, + "model2", + OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, + "model3", + OpenAiEmbeddingsTaskSettings.USER, + "user2" + ) + ) ); var overriddenTaskSettings = OpenAiEmbeddingsTaskSettings.of(taskSettings, requestTaskSettings); @@ -91,17 +153,29 @@ public void testOverrideWith_UsesOverriddenSettings() { public void testOverrideWith_UsesOnlyNonNullModelSetting() { var taskSettings = OpenAiEmbeddingsTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.MODEL, "model", OpenAiEmbeddingsTaskSettings.USER, "user")) + new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, "model", OpenAiEmbeddingsTaskSettings.USER, "user")), + false ); var requestTaskSettings = OpenAiEmbeddingsRequestTaskSettings.fromMap( - new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.MODEL, "model2")) + new HashMap<>(Map.of(OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, "model2")) ); var overriddenTaskSettings = OpenAiEmbeddingsTaskSettings.of(taskSettings, requestTaskSettings); MatcherAssert.assertThat(overriddenTaskSettings, is(new OpenAiEmbeddingsTaskSettings("model2", "user"))); } + public void testXContent_WritesModelId() throws IOException { + var entity = new OpenAiEmbeddingsTaskSettings("modelId", null); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + entity.toXContent(builder, null); + String xContentResult = Strings.toString(builder); + + assertThat(xContentResult, CoreMatchers.is(""" + 
{"model_id":"modelId"}""")); + } + @Override protected Writeable.Reader instanceReader() { return OpenAiEmbeddingsTaskSettings::new; @@ -118,7 +192,7 @@ protected OpenAiEmbeddingsTaskSettings mutateInstance(OpenAiEmbeddingsTaskSettin } public static Map getTaskSettingsMap(String model, @Nullable String user) { - var map = new HashMap(Map.of(OpenAiEmbeddingsTaskSettings.MODEL, model)); + var map = new HashMap(Map.of(OpenAiEmbeddingsTaskSettings.OLD_MODEL_ID_FIELD, model)); if (user != null) { map.put(OpenAiEmbeddingsTaskSettings.USER, user); From 24f9682b7d0c1e6e0d3a5d3b71cedd758d01bc66 Mon Sep 17 00:00:00 2001 From: Jedr Blaszyk Date: Tue, 6 Feb 2024 19:35:06 +0100 Subject: [PATCH 083/106] [Connector API] Support filtering connectors by service type and a query (#105178) --- docs/changelog/105178.yaml | 5 ++ .../rest-api-spec/api/connector.list.json | 16 +++-- .../test/entsearch/310_connector_list.yml | 57 +++++++++++++++++- .../connector/ConnectorIndexService.java | 58 ++++++++++++++----- .../connector/action/ListConnectorAction.java | 44 ++++++++++++-- .../action/RestListConnectorAction.java | 12 +++- .../action/TransportListConnectorAction.java | 2 + .../connector/ConnectorIndexServiceTests.java | 12 +++- ...ectorActionRequestBWCSerializingTests.java | 12 +++- 9 files changed, 187 insertions(+), 31 deletions(-) create mode 100644 docs/changelog/105178.yaml diff --git a/docs/changelog/105178.yaml b/docs/changelog/105178.yaml new file mode 100644 index 0000000000000..e8fc9cfd6898f --- /dev/null +++ b/docs/changelog/105178.yaml @@ -0,0 +1,5 @@ +pr: 105178 +summary: "[Connector API] Support filtering connectors by service type and a query" +area: Application +type: enhancement +issues: [] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json index 562190f6f5cad..67d2250d3c661 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json @@ -30,15 +30,23 @@ "size": { "type": "int", "default": 100, - "description": "specifies a max number of results to get (default: 100)" + "description": "Specifies a max number of results to get (default: 100)" }, "index_name": { - "type": "string", - "description": "connector index name(s) to fetch connector documents for" + "type": "list", + "description": "A comma-separated list of connector index names to fetch connector documents for" }, "connector_name": { + "type": "list", + "description": "A comma-separated list of connector names to fetch connector documents for" + }, + "service_type": { + "type": "list", + "description": "A comma-separated list of connector service types to fetch connector documents for" + }, + "query": { "type": "string", - "description": "connector name(s) to fetch connector documents for" + "description": "A search string for querying connectors, filtering results by matching against connector names, descriptions, and index names" } } } diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml index 7aa49297902d5..540aecf2a8db0 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml +++ 
b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/310_connector_list.yml @@ -30,7 +30,7 @@ setup: name: my-connector-2 language: en is_native: true - service_type: super-connector + service_type: extra-connector --- "List Connectors": @@ -163,4 +163,59 @@ setup: - match: { results.0.name: "my-connector-2" } +--- +"List Connector - filter by service type": + - do: + connector.list: + service_type: super-connector + + - match: { count: 2 } + - match: { results.0.id: "connector-a" } + - match: { results.1.id: "connector-c" } + + - do: + connector.list: + service_type: extra-connector + + - match: { count: 1 } + - match: { results.0.id: "connector-b" } + +--- +"List Connector - filter by multiple service types": + - do: + connector.list: + service_type: super-connector,extra-connector + + - match: { count: 3 } + +--- +"List Connector - filter by search query": + - do: + connector.list: + query: my-connector-1 + + - match: { count: 1 } + - match: { results.0.id: "connector-a" } + + - do: + connector.list: + query: my-connector + + - match: { count: 3 } + + + - do: + connector.list: + query: search-3-test + + - match: { count: 1 } + + - do: + connector.list: + query: search- + + - match: { count: 3 } + + + diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index 6392052e9f0b9..a7060fa6a97e9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.common.Strings; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.IdsQueryBuilder; @@ -32,6 +33,7 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; +import org.elasticsearch.index.query.WildcardQueryBuilder; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; @@ -279,25 +281,29 @@ public void deleteConnector(String connectorId, ActionListener l } /** - * List the {@link Connector} in ascending order of their index names. + * Lists {@link Connector}s in ascending order of index names, filtered by specified criteria. * - * @param from From index to start the search from. - * @param size The maximum number of {@link Connector}s to return. - * @param indexNames A list of index names to filter the connectors. - * @param connectorNames A list of connector names to further filter the search results. - * @param listener The action listener to invoke on response/failure. + * @param from Starting index for the search. + * @param size Maximum number of {@link Connector}s to retrieve. + * @param indexNames Filter connectors by these index names, if provided. + * @param connectorNames Filter connectors by connector names, if provided. + * @param serviceTypes Filter connectors by service types, if provided. 
+ * @param searchQuery Apply a wildcard search on index name, connector name, and description, if provided. + * @param listener Invoked with search results or upon failure. */ public void listConnectors( int from, int size, List indexNames, List connectorNames, + List serviceTypes, + String searchQuery, ActionListener listener ) { try { final SearchSourceBuilder source = new SearchSourceBuilder().from(from) .size(size) - .query(buildListQuery(indexNames, connectorNames)) + .query(buildListQuery(indexNames, connectorNames, serviceTypes, searchQuery)) .fetchSource(true) .sort(Connector.INDEX_NAME_FIELD.getPreferredName(), SortOrder.ASC); final SearchRequest req = new SearchRequest(CONNECTOR_INDEX_NAME).source(source); @@ -326,18 +332,26 @@ public void onFailure(Exception e) { } /** - * Constructs a query for filtering instances of {@link Connector} based on index and/or connector names. - * Returns a {@link MatchAllQueryBuilder} if both parameters are empty or null, - * otherwise constructs a boolean query to filter by the provided lists. + * Builds a query to filter {@link Connector} instances by index names, connector names, service type, and/or search query. + * Returns a {@link MatchAllQueryBuilder} if no filters are applied, otherwise constructs a boolean query with the specified filters. * - * @param indexNames List of index names to filter by, or null/empty for no index name filtering. - * @param connectorNames List of connector names to filter by, or null/empty for no name filtering. - * @return A {@link QueryBuilder} tailored to the specified filters. + * @param indexNames List of index names for filtering, or null/empty to skip. + * @param connectorNames List of connector names for filtering, or null/empty to skip. + * @param serviceTypes List of connector service types for filtering, or null/empty to skip. + * @param searchQuery Search query for wildcard filtering on index name, connector name, and description, or null/empty to skip. + * @return A {@link QueryBuilder} customized based on provided filters. 
*/ - private QueryBuilder buildListQuery(List indexNames, List connectorNames) { + private QueryBuilder buildListQuery( + List indexNames, + List connectorNames, + List serviceTypes, + String searchQuery + ) { boolean filterByIndexNames = indexNames != null && indexNames.isEmpty() == false; boolean filterByConnectorNames = indexNames != null && connectorNames.isEmpty() == false; - boolean usesFilter = filterByIndexNames || filterByConnectorNames; + boolean filterByServiceTypes = serviceTypes != null && serviceTypes.isEmpty() == false; + boolean filterBySearchQuery = Strings.isNullOrEmpty(searchQuery) == false; + boolean usesFilter = filterByIndexNames || filterByConnectorNames || filterByServiceTypes || filterBySearchQuery; BoolQueryBuilder boolFilterQueryBuilder = new BoolQueryBuilder(); @@ -348,6 +362,20 @@ private QueryBuilder buildListQuery(List indexNames, List connec if (filterByConnectorNames) { boolFilterQueryBuilder.must().add(new TermsQueryBuilder(Connector.NAME_FIELD.getPreferredName(), connectorNames)); } + if (filterByServiceTypes) { + boolFilterQueryBuilder.must().add(new TermsQueryBuilder(Connector.SERVICE_TYPE_FIELD.getPreferredName(), serviceTypes)); + } + if (filterBySearchQuery) { + String wildcardQueryValue = '*' + searchQuery + '*'; + boolFilterQueryBuilder.must() + .add( + new BoolQueryBuilder().should( + new WildcardQueryBuilder(Connector.INDEX_NAME_FIELD.getPreferredName(), wildcardQueryValue) + ) + .should(new WildcardQueryBuilder(Connector.NAME_FIELD.getPreferredName(), wildcardQueryValue)) + .should(new WildcardQueryBuilder(Connector.DESCRIPTION_FIELD.getPreferredName(), wildcardQueryValue)) + ); + } } return usesFilter ? boolFilterQueryBuilder : new MatchAllQueryBuilder(); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java index 13a588fdd6314..e4eeea0bf20ef 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ListConnectorAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.Connector; import org.elasticsearch.xpack.application.connector.ConnectorSearchResult; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.action.util.QueryPage; @@ -44,22 +45,35 @@ public static class Request extends ActionRequest implements ToXContentObject { private final PageParams pageParams; private final List indexNames; private final List connectorNames; + private final List connectorServiceTypes; + private final String connectorSearchQuery; private static final ParseField PAGE_PARAMS_FIELD = new ParseField("pageParams"); private static final ParseField INDEX_NAMES_FIELD = new ParseField("index_names"); private static final ParseField NAMES_FIELD = new ParseField("names"); + private static final ParseField SEARCH_QUERY_FIELD = new ParseField("query"); public Request(StreamInput in) throws IOException { super(in); this.pageParams = new PageParams(in); this.indexNames = in.readOptionalStringCollectionAsList(); this.connectorNames = in.readOptionalStringCollectionAsList(); + 
this.connectorServiceTypes = in.readOptionalStringCollectionAsList(); + this.connectorSearchQuery = in.readOptionalString(); } - public Request(PageParams pageParams, List indexNames, List connectorNames) { + public Request( + PageParams pageParams, + List indexNames, + List connectorNames, + List serviceTypes, + String connectorSearchQuery + ) { this.pageParams = pageParams; this.indexNames = indexNames; this.connectorNames = connectorNames; + this.connectorServiceTypes = serviceTypes; + this.connectorSearchQuery = connectorSearchQuery; } public PageParams getPageParams() { @@ -74,6 +88,14 @@ public List getConnectorNames() { return connectorNames; } + public List getConnectorServiceTypes() { + return connectorServiceTypes; + } + + public String getConnectorSearchQuery() { + return connectorSearchQuery; + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; @@ -97,6 +119,8 @@ public void writeTo(StreamOutput out) throws IOException { pageParams.writeTo(out); out.writeOptionalStringCollection(indexNames); out.writeOptionalStringCollection(connectorNames); + out.writeOptionalStringCollection(connectorServiceTypes); + out.writeOptionalString(connectorSearchQuery); } @Override @@ -106,24 +130,34 @@ public boolean equals(Object o) { ListConnectorAction.Request request = (ListConnectorAction.Request) o; return Objects.equals(pageParams, request.pageParams) && Objects.equals(indexNames, request.indexNames) - && Objects.equals(connectorNames, request.connectorNames); + && Objects.equals(connectorNames, request.connectorNames) + && Objects.equals(connectorServiceTypes, request.connectorServiceTypes) + && Objects.equals(connectorSearchQuery, request.connectorSearchQuery); } @Override public int hashCode() { - return Objects.hash(pageParams, indexNames, connectorNames); + return Objects.hash(pageParams, indexNames, connectorNames, connectorServiceTypes, connectorSearchQuery); } @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( "list_connector_request", - p -> new ListConnectorAction.Request((PageParams) p[0], (List) p[1], (List) p[2]) + p -> new ListConnectorAction.Request( + (PageParams) p[0], + (List) p[1], + (List) p[2], + (List) p[3], + (String) p[4] + ) ); static { PARSER.declareObject(constructorArg(), (p, c) -> PageParams.fromXContent(p), PAGE_PARAMS_FIELD); PARSER.declareStringArray(optionalConstructorArg(), INDEX_NAMES_FIELD); PARSER.declareStringArray(optionalConstructorArg(), NAMES_FIELD); + PARSER.declareStringArray(optionalConstructorArg(), Connector.SERVICE_TYPE_FIELD); + PARSER.declareString(optionalConstructorArg(), SEARCH_QUERY_FIELD); } public static ListConnectorAction.Request parse(XContentParser parser) { @@ -137,6 +171,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(PAGE_PARAMS_FIELD.getPreferredName(), pageParams); builder.field(INDEX_NAMES_FIELD.getPreferredName(), indexNames); builder.field(NAMES_FIELD.getPreferredName(), connectorNames); + builder.field(Connector.SERVICE_TYPE_FIELD.getPreferredName(), connectorServiceTypes); + builder.field(SEARCH_QUERY_FIELD.getPreferredName(), connectorSearchQuery); } builder.endObject(); return builder; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java index 90232b340719d..765f6eca12290 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestListConnectorAction.java @@ -41,8 +41,16 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient int size = restRequest.paramAsInt("size", PageParams.DEFAULT_SIZE); List indexNames = List.of(restRequest.paramAsStringArray(Connector.INDEX_NAME_FIELD.getPreferredName(), new String[0])); List connectorNames = List.of(restRequest.paramAsStringArray("connector_name", new String[0])); - - ListConnectorAction.Request request = new ListConnectorAction.Request(new PageParams(from, size), indexNames, connectorNames); + List serviceTypes = List.of(restRequest.paramAsStringArray("service_type", new String[0])); + String searchQuery = restRequest.param("query"); + + ListConnectorAction.Request request = new ListConnectorAction.Request( + new PageParams(from, size), + indexNames, + connectorNames, + serviceTypes, + searchQuery + ); return channel -> client.execute(ListConnectorAction.INSTANCE, request, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportListConnectorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportListConnectorAction.java index 03334751c5a42..11705ef3f7035 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportListConnectorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportListConnectorAction.java @@ -48,6 +48,8 @@ protected void doExecute(Task task, ListConnectorAction.Request request, ActionL pageParams.getSize(), request.getIndexNames(), request.getConnectorNames(), + request.getConnectorServiceTypes(), + request.getConnectorSearchQuery(), listener.map(r -> new ListConnectorAction.Response(r.connectors(), r.totalResults())) ); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java index 52bfd64db1844..08943e96031c8 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorIndexServiceTests.java @@ -534,12 +534,18 @@ public void onFailure(Exception e) { return resp.get(); } - private ConnectorIndexService.ConnectorResult awaitListConnector(int from, int size, List indexNames, List names) - throws Exception { + private ConnectorIndexService.ConnectorResult awaitListConnector( + int from, + int size, + List indexNames, + List names, + List serviceTypes, + String searchQuery + ) throws Exception { CountDownLatch latch = new CountDownLatch(1); final AtomicReference resp = new AtomicReference<>(null); final AtomicReference exc = new AtomicReference<>(null); - connectorIndexService.listConnectors(from, size, indexNames, names, new ActionListener<>() { + connectorIndexService.listConnectors(from, size, indexNames, names, 
serviceTypes, searchQuery, new ActionListener<>() { @Override public void onResponse(ConnectorIndexService.ConnectorResult result) { resp.set(result); diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java index 3d2192098d907..366001b6dd215 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ListConnectorActionRequestBWCSerializingTests.java @@ -29,7 +29,9 @@ protected ListConnectorAction.Request createTestInstance() { return new ListConnectorAction.Request( pageParams, List.of(generateRandomStringArray(10, 10, false)), - List.of(generateRandomStringArray(10, 10, false)) + List.of(generateRandomStringArray(10, 10, false)), + List.of(generateRandomStringArray(10, 10, false)), + randomAlphaOfLengthBetween(3, 10) ); } @@ -45,6 +47,12 @@ protected ListConnectorAction.Request doParseInstance(XContentParser parser) thr @Override protected ListConnectorAction.Request mutateInstanceForVersion(ListConnectorAction.Request instance, TransportVersion version) { - return new ListConnectorAction.Request(instance.getPageParams(), instance.getIndexNames(), instance.getConnectorNames()); + return new ListConnectorAction.Request( + instance.getPageParams(), + instance.getIndexNames(), + instance.getConnectorNames(), + instance.getConnectorServiceTypes(), + instance.getConnectorSearchQuery() + ); } } From 3d380cc89989ffac7491b7a26d90a4ff51fe35e0 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 6 Feb 2024 19:16:58 +0000 Subject: [PATCH 084/106] Fix compilation (#105211) The changes in #105183 clashed with #104363 --- .../nlp/tokenizers/XLMRobertaTokenizerTests.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java index 1ac0f0d536988..bff2c6a94d789 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/XLMRobertaTokenizerTests.java @@ -95,13 +95,13 @@ public void testSurrogatePair() throws IOException { new XLMRobertaTokenization(false, null, Tokenization.Truncate.NONE, -1) ).build() ) { - TokenizationResult.Tokens tokenization = tokenizer.tokenize("😀", Tokenization.Truncate.NONE, -1, 0).get(0); + TokenizationResult.Tokens tokenization = tokenizer.tokenize("😀", Tokenization.Truncate.NONE, -1, 0, null).get(0); assertThat(tokenStrings(tokenization.tokens().get(0)), contains("▁\uD83D\uDE00")); - tokenization = tokenizer.tokenize("Elasticsearch 😀", Tokenization.Truncate.NONE, -1, 0).get(0); + tokenization = tokenizer.tokenize("Elasticsearch 😀", Tokenization.Truncate.NONE, -1, 0, null).get(0); assertThat(tokenStrings(tokenization.tokens().get(0)), contains("▁Ela", "stic", "search", "▁\uD83D\uDE00")); - tokenization = tokenizer.tokenize("Elasticsearch 😀 fun", Tokenization.Truncate.NONE, -1, 0).get(0); + tokenization = tokenizer.tokenize("Elasticsearch 😀 fun", 
Tokenization.Truncate.NONE, -1, 0, null).get(0); assertThat(tokenStrings(tokenization.tokens().get(0)), contains("▁Ela", "stic", "search", "▁\uD83D\uDE00", "▁fun")); } } @@ -114,11 +114,11 @@ public void testMultiByteEmoji() throws IOException { new XLMRobertaTokenization(false, null, Tokenization.Truncate.NONE, -1) ).build() ) { - TokenizationResult.Tokens tokenization = tokenizer.tokenize("🇸🇴", Tokenization.Truncate.NONE, -1, 0).get(0); + TokenizationResult.Tokens tokenization = tokenizer.tokenize("🇸🇴", Tokenization.Truncate.NONE, -1, 0, null).get(0); assertThat(tokenStrings(tokenization.tokens().get(0)), contains("▁🇸🇴")); assertThat(tokenization.tokenIds()[0], not(equalTo(3))); // not the unknown token - tokenization = tokenizer.tokenize("🏁", Tokenization.Truncate.NONE, -1, 0).get(0); + tokenization = tokenizer.tokenize("🏁", Tokenization.Truncate.NONE, -1, 0, null).get(0); assertThat(tokenStrings(tokenization.tokens().get(0)), contains("▁🏁")); assertThat(tokenization.tokenIds()[0], equalTo(3)); // the unknown token (not in the vocabulary) } } From 9ac31a8af186168044f9b05cbe562784a2104f86 Mon Sep 17 00:00:00 2001 From: Pat Whelan Date: Tue, 6 Feb 2024 14:41:25 -0500 Subject: [PATCH 085/106] [Transform] Remove duplicate checkpoint audits (#105164) Transform checkpoints have a chance to log duplicate audits or drop iterations: the volatile counters can be read and incremented by multiple threads, potentially storing the same value back into memory. This change replaces the volatile counters with a single atomic counter, which counts down the iterations until it reaches zero and is then reset to the number of iterations until the next audited checkpoint. Closes #105106 --- docs/changelog/105164.yaml | 6 +++ .../transforms/TransformIndexer.java | 39 +++++++++++-------- .../ClientTransformIndexerTests.java | 3 +- 3 files changed, 29 insertions(+), 19 deletions(-) create mode 100644 docs/changelog/105164.yaml diff --git a/docs/changelog/105164.yaml b/docs/changelog/105164.yaml new file mode 100644 index 0000000000000..7affb0911bc6d --- /dev/null +++ b/docs/changelog/105164.yaml @@ -0,0 +1,6 @@ +pr: 105164 +summary: Remove duplicate checkpoint audits +area: Transform +type: bug +issues: + - 105106 diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java index f6d4ae2d53c9a..e56a54a166b39 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java @@ -57,6 +57,7 @@ import java.util.Objects; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Stream; @@ -119,8 +120,7 @@ private enum RunState { private Map nextChangeCollectorBucketPosition = null; private volatile Integer initialConfiguredPageSize; - private volatile long logEvery = 1; - private volatile long logCount = 0; + private final AtomicInteger remainingCheckpointsUntilAudit = new AtomicInteger(0); private volatile TransformCheckpoint lastCheckpoint; private volatile TransformCheckpoint nextCheckpoint; @@ -1154,26 +1154,31 @@ private SearchRequest buildQueryToUpdateDestinationIndex() { } /** - * Indicates if an audit message should be written when onFinish is called for the given checkpoint - * 
We audit the first checkpoint, and then every 10 checkpoints until completedCheckpoint == 99 - * Then we audit every 100, until completedCheckpoint == 999 - * - * Then we always audit every 1_000 checkpoints + * Indicates if an audit message should be written when onFinish is called for the given checkpoint. + * We audit every checkpoint for the first 10 checkpoints until completedCheckpoint == 9. + * Then we audit every 10th checkpoint until completedCheckpoint == 99. + * Then we audit every 100th checkpoint until completedCheckpoint == 999. + * Then we always audit every 1_000th checkpoint. * * @param completedCheckpoint The checkpoint that was just completed * @return {@code true} if an audit message should be written */ protected boolean shouldAuditOnFinish(long completedCheckpoint) { - if (++logCount % logEvery != 0) { - return false; - } - if (completedCheckpoint == 0) { - return true; - } - int log10Checkpoint = (int) Math.floor(Math.log10(completedCheckpoint)); - logEvery = log10Checkpoint >= 3 ? 1_000 : (int) Math.pow(10.0, log10Checkpoint); - logCount = 0; - return true; + return remainingCheckpointsUntilAudit.getAndUpdate(count -> { + if (count > 0) { + return count - 1; + } + + if (completedCheckpoint >= 1000) { + return 999; + } else if (completedCheckpoint >= 100) { + return 99; + } else if (completedCheckpoint >= 10) { + return 9; + } else { + return 0; + } + }) == 0; } private RunState determineRunStateAtStart() { diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java index 1c6d1615cbb9c..ba7c09ed35a6d 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java @@ -68,7 +68,6 @@ import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; -import java.util.stream.Collectors; import java.util.stream.IntStream; import static org.mockito.Mockito.mock; @@ -78,7 +77,7 @@ public class ClientTransformIndexerTests extends ESTestCase { public void testAuditOnFinishFrequency() { ClientTransformIndexer indexer = createTestIndexer(); - List shouldAudit = IntStream.range(0, 100_000).boxed().map(indexer::shouldAuditOnFinish).collect(Collectors.toList()); + List shouldAudit = IntStream.range(0, 100_000).boxed().map(indexer::shouldAuditOnFinish).toList(); // Audit every checkpoint for the first 10 assertTrue(shouldAudit.get(0)); From ed7f9775236c1e7b98d38763e5deec35f86a848f Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 6 Feb 2024 15:13:08 -0500 Subject: [PATCH 086/106] ESQL: More tests for `STATS BY blah, blah` syntax (#105204) In ESQL you can write a grouping `STATS` command without any functions - just `STATS BY foo, bar, baz` and we'll calculate all the combinations of group keys without running any functions. We only have a single example of that in our tests though! This adds two more that are slightly more complex. Just out of paranoia. 
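For reference, the simpler of the two new cases groups on two keys with no aggregate functions at all:

```esql
FROM employees | STATS BY gender, still_hired | SORT gender, still_hired
```

It returns one row per distinct combination of the grouping keys; see `aggsWithoutStatsTwo` in the spec below.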
--- .../src/main/resources/stats.csv-spec | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 22d0f8c307c12..3ec1117b193e0 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -583,6 +583,33 @@ M null ; +aggsWithoutStatsTwo +FROM employees | STATS BY gender, still_hired | SORT gender, still_hired; + +gender:keyword | still_hired:boolean +F | false +F | true +M | false +M | true +null | false +null | true +; + +aggsWithoutStatsFormula +FROM employees | EVAL birth_decade = ROUND(DATE_EXTRACT("YEAR", birth_date), -1) | STATS BY gender, birth_decade | SORT gender, birth_decade; + +gender:keyword | birth_decade:long +F | 1950 +F | 1960 +F | null +M | 1950 +M | 1960 +M | 1970 +M | null +null | 1950 +null | 1960 +; + countFieldNoGrouping from employees | where emp_no < 10050 | stats c = count(salary); From 2c5c134623514768e07108b8fb4e139b328c8d8c Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Tue, 6 Feb 2024 15:28:51 -0500 Subject: [PATCH 087/106] [ML] Inference service support for multilingual-e5 builtin and customEland text_embedding models (#104949) Add text_embedding service Add support for multilingual-e5 builtin models Add support for custom eland text_embedding models --- docs/changelog/104949.yaml | 5 + .../org/elasticsearch/TransportVersions.java | 1 + .../inference/InferenceService.java | 20 +- .../results/TextEmbeddingResults.java | 35 ++ .../xpack/core/ml/utils/ExceptionsHelper.java | 11 +- .../inference/InferenceBaseRestTest.java | 9 + .../xpack/inference/InferenceCrudIT.java | 8 +- .../inference/MockInferenceServiceIT.java | 22 +- .../xpack/inference/TextEmbeddingCrudIT.java | 167 ++++++++ .../mock/TestInferenceServiceExtension.java | 7 +- .../integration/ModelRegistryImplIT.java | 18 +- .../InferenceNamedWriteablesProvider.java | 24 +- .../xpack/inference/InferencePlugin.java | 8 +- .../TransportPutInferenceModelAction.java | 31 +- .../inference/services/ServiceUtils.java | 8 + .../services/cohere/CohereService.java | 39 +- ...NodeModel.java => ElserInternalModel.java} | 10 +- ...Service.java => ElserInternalService.java} | 116 +++-- .../elser/ElserInternalServiceSettings.java | 122 ++++++ .../elser/ElserMlNodeServiceSettings.java | 188 --------- .../huggingface/HuggingFaceBaseService.java | 37 +- .../services/openai/OpenAiService.java | 39 +- .../settings/InternalServiceSettings.java | 129 ++++++ .../CustomElandInternalServiceSettings.java | 97 +++++ .../textembedding/CustomElandModel.java | 79 ++++ ...lingualE5SmallInternalServiceSettings.java | 106 +++++ .../MultilingualE5SmallModel.java | 80 ++++ .../TextEmbeddingInternalService.java | 397 ++++++++++++++++++ .../TextEmbeddingInternalServiceSettings.java | 42 ++ .../textembedding/TextEmbeddingModel.java | 39 ++ .../inference/ModelConfigurationsTests.java | 4 +- .../services/SenderServiceTests.java | 7 +- .../services/cohere/CohereServiceTests.java | 127 +++--- ...=> ElserInternalServiceSettingsTests.java} | 86 ++-- .../elser/ElserInternalServiceTests.java | 314 ++++++++++++++ .../elser/ElserMlNodeServiceTests.java | 244 ----------- .../huggingface/HuggingFaceServiceTests.java | 93 ++-- .../services/openai/OpenAiServiceTests.java | 146 ++++--- ...alE5SmallInternalServiceSettingsTests.java | 153 +++++++ 
.../TextEmbeddingInternalServiceTests.java | 349 +++++++++++++++ 40 files changed, 2617 insertions(+), 800 deletions(-) create mode 100644 docs/changelog/104949.yaml create mode 100644 x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/{ElserMlNodeModel.java => ElserInternalModel.java} (76%) rename x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/{ElserMlNodeService.java => ElserInternalService.java} (76%) create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceSettings.java delete mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/InternalServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/CustomElandInternalServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/CustomElandModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/MultilingualE5SmallInternalServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/MultilingualE5SmallModel.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingInternalService.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingInternalServiceSettings.java create mode 100644 x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingModel.java rename x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/{ElserMlNodeServiceSettingsTests.java => ElserInternalServiceSettingsTests.java} (51%) create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java delete mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/textembedding/MultilingualE5SmallInternalServiceSettingsTests.java create mode 100644 x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingInternalServiceTests.java diff --git a/docs/changelog/104949.yaml b/docs/changelog/104949.yaml new file mode 100644 index 0000000000000..c2682fc911f1d --- /dev/null +++ b/docs/changelog/104949.yaml @@ -0,0 +1,5 @@ +pr: 104949 +summary: Add text_embedding inference service with multilingual-e5 and custom eland models +area: Machine Learning +type: enhancement +issues: [ ] diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 86d7cf9c718df..7d7f7b66356d0 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -171,6 +171,7 @@ static 
TransportVersion def(int id) { public static final TransportVersion TRANSFORM_GET_BASIC_STATS = def(8_584_00_0); public static final TransportVersion NLP_DOCUMENT_CHUNKING_ADDED = def(8_585_00_0); public static final TransportVersion SEARCH_TIMEOUT_EXCEPTION_ADDED = def(8_586_00_0); + public static final TransportVersion ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED = def(8_587_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceService.java b/server/src/main/java/org/elasticsearch/inference/InferenceService.java index 26c8eac53b0fb..605c799a2ba99 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceService.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceService.java @@ -29,18 +29,24 @@ default void init(Client client) {} * {@code service_settings} field. * This function modifies {@code config map}, fields are removed * from the map as they are read. - * + *

* If the map contains unrecognized configuration option an * {@code ElasticsearchStatusException} is thrown. * - * @param modelId Model Id - * @param taskType The model task type - * @param config Configuration options including the secrets + * @param modelId Model Id + * @param taskType The model task type + * @param config Configuration options including the secrets * @param platfromArchitectures The Set of platform architectures (OS name and hardware architecture) - * the cluster nodes and models are running on. - * @return The parsed {@link Model} + * the cluster nodes and models are running on. + * @param parsedModelListener A listener which will handle the resulting model or failure */ - Model parseRequestConfig(String modelId, TaskType taskType, Map config, Set platfromArchitectures); + void parseRequestConfig( + String modelId, + TaskType taskType, + Map config, + Set platfromArchitectures, + ActionListener parsedModelListener + ); /** * Parse model configuration from {@code config map} from persisted storage and return the parsed {@link Model}. This requires that diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java index 75eb4ebc19902..15271c1da58fa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingResults.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.inference.results; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -14,6 +15,7 @@ import org.elasticsearch.inference.InferenceResults; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -59,6 +61,30 @@ public TextEmbeddingResults(StreamInput in) throws IOException { ); } + public static TextEmbeddingResults of(List results) { + List embeddings = new ArrayList<>(results.size()); + for (InferenceResults result : results) { + if (result instanceof org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults embeddingResult) { + embeddings.add(Embedding.of(embeddingResult)); + } else if (result instanceof org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults errorResult) { + if (errorResult.getException() instanceof ElasticsearchStatusException statusException) { + throw statusException; + } else { + throw new ElasticsearchStatusException( + "Received error inference result.", + RestStatus.INTERNAL_SERVER_ERROR, + errorResult.getException() + ); + } + } else { + throw new IllegalArgumentException( + "Received invalid inference result, was type of " + result.getClass().getName() + " but expected TextEmbeddingResults." 
+ ); + } + } + return new TextEmbeddingResults(embeddings); + } + @Override public int getFirstEmbeddingSize() { return TextEmbeddingUtils.getFirstEmbeddingSize(new ArrayList<>(embeddings)); @@ -116,6 +142,15 @@ public Embedding(StreamInput in) throws IOException { this(in.readCollectionAsImmutableList(StreamInput::readFloat)); } + public static Embedding of(org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults embeddingResult) { + List embeddingAsList = new ArrayList<>(); + float[] embeddingAsArray = embeddingResult.getInferenceAsFloat(); + for (float dim : embeddingAsArray) { + embeddingAsList.add(dim); + } + return new Embedding(embeddingAsList); + } + @Override public int getSize() { return values.size(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java index 57b08ad3f3e31..f45c1f55862b2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java @@ -51,11 +51,18 @@ public static ResourceNotFoundException missingModelDeployment(String deployment } public static ResourceNotFoundException missingTrainedModel(String modelId) { - return new ResourceNotFoundException("No known trained model with model_id [{}], you may need to create it", modelId); + return new ResourceNotFoundException( + "No known trained model with model_id [{}], you may need to create it or load it into the cluster with eland", + modelId + ); } public static ResourceNotFoundException missingTrainedModel(String modelId, Exception cause) { - return new ResourceNotFoundException("No known trained model with model_id [{}], you may need to create it", cause, modelId); + return new ResourceNotFoundException( + "No known trained model with model_id [{}], you may need to create it or load it into the cluster with eland", + cause, + modelId + ); } public static ElasticsearchException serverError(String msg) { diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java index 1fe011acc44a3..c1d39ba0aba38 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceBaseRestTest.java @@ -115,6 +115,7 @@ protected Map getModels(String modelId, TaskType taskType) throw } protected Map getAllModels() throws IOException { + var endpoint = Strings.format("_inference/_all"); return getAllModelInternal("_inference/_all"); } @@ -173,4 +174,12 @@ protected static void assertOkOrCreated(Response response) throws IOException { String responseStr = EntityUtils.toString(response.getEntity()); assertThat(responseStr, response.getStatusLine().getStatusCode(), anyOf(equalTo(200), equalTo(201))); } + + protected Map getTrainedModel(String inferenceEntityId) throws IOException { + var endpoint = Strings.format("_ml/trained_models/%s/_stats", inferenceEntityId); + var request = new Request("GET", endpoint); + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + 
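Embedding.of above copies the primitive float[] element by element because Java offers no direct boxing for primitive arrays; Arrays.asList(float[]) would yield a single-element List<float[]> rather than a List<Float>. A standalone sketch of the same conversion:

static List<Float> toFloatList(float[] values) {
    // Box each primitive float explicitly.
    List<Float> result = new ArrayList<>(values.length);
    for (float v : values) {
        result.add(v);
    }
    return result;
}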
} } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 82e8e5aedc378..23b34c5f500e8 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -77,10 +77,10 @@ public void testDeleteModelWithWrongTaskType() throws IOException { @SuppressWarnings("unchecked") public void testGetModelWithAnyTaskType() throws IOException { - String modelId = "sparse_embedding_model"; - putModel(modelId, mockServiceModelConfig(), TaskType.SPARSE_EMBEDDING); - var singleModel = (List>) getModels(modelId, TaskType.ANY).get("models"); - assertEquals(modelId, singleModel.get(0).get("model_id")); + String inferenceEntityId = "sparse_embedding_model"; + putModel(inferenceEntityId, mockServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + var singleModel = (List>) getModels(inferenceEntityId, TaskType.ANY).get("models"); + assertEquals(inferenceEntityId, singleModel.get(0).get("model_id")); assertEquals(TaskType.SPARSE_EMBEDDING.toString(), singleModel.get(0).get("task_type")); } diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java index 1629f4c845ec2..1c8e7a96e6b5c 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/MockInferenceServiceIT.java @@ -17,30 +17,30 @@ public class MockInferenceServiceIT extends InferenceBaseRestTest { @SuppressWarnings("unchecked") public void testMockService() throws IOException { - String modelId = "test-mock"; - var putModel = putModel(modelId, mockServiceModelConfig(), TaskType.SPARSE_EMBEDDING); - var getModels = getModels(modelId, TaskType.SPARSE_EMBEDDING); + String inferenceEntityId = "test-mock"; + var putModel = putModel(inferenceEntityId, mockServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + var getModels = getModels(inferenceEntityId, TaskType.SPARSE_EMBEDDING); var model = ((List>) getModels.get("models")).get(0); for (var modelMap : List.of(putModel, model)) { - assertEquals(modelId, modelMap.get("model_id")); + assertEquals(inferenceEntityId, modelMap.get("model_id")); assertEquals(TaskType.SPARSE_EMBEDDING, TaskType.fromString((String) modelMap.get("task_type"))); assertEquals("test_service", modelMap.get("service")); } // The response is randomly generated, the input can be anything - var inference = inferOnMockService(modelId, List.of(randomAlphaOfLength(10))); + var inference = inferOnMockService(inferenceEntityId, List.of(randomAlphaOfLength(10))); assertNonEmptyInferenceResults(inference, 1, TaskType.SPARSE_EMBEDDING); } @SuppressWarnings("unchecked") public void testMockServiceWithMultipleInputs() throws IOException { - String modelId = "test-mock-with-multi-inputs"; - putModel(modelId, mockServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + String inferenceEntityId = 
"test-mock-with-multi-inputs"; + putModel(inferenceEntityId, mockServiceModelConfig(), TaskType.SPARSE_EMBEDDING); // The response is randomly generated, the input can be anything var inference = inferOnMockService( - modelId, + inferenceEntityId, TaskType.SPARSE_EMBEDDING, List.of(randomAlphaOfLength(5), randomAlphaOfLength(10), randomAlphaOfLength(15)) ); @@ -50,9 +50,9 @@ public void testMockServiceWithMultipleInputs() throws IOException { @SuppressWarnings("unchecked") public void testMockService_DoesNotReturnSecretsInGetResponse() throws IOException { - String modelId = "test-mock"; - var putModel = putModel(modelId, mockServiceModelConfig(), TaskType.SPARSE_EMBEDDING); - var getModels = getModels(modelId, TaskType.SPARSE_EMBEDDING); + String inferenceEntityId = "test-mock"; + var putModel = putModel(inferenceEntityId, mockServiceModelConfig(), TaskType.SPARSE_EMBEDDING); + var getModels = getModels(inferenceEntityId, TaskType.SPARSE_EMBEDDING); var model = ((List>) getModels.get("models")).get(0); var serviceSettings = (Map) model.get("service_settings"); diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java new file mode 100644 index 0000000000000..24a701095ecb7 --- /dev/null +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/TextEmbeddingCrudIT.java @@ -0,0 +1,167 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference; + +import org.apache.lucene.tests.util.LuceneTestCase; +import org.elasticsearch.client.Request; +import org.elasticsearch.common.Strings; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.plugins.Platforms; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; + +// Tests disabled in CI due to the models being too large to download. 
Can be enabled (commented out) for local testing +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105198") +public class TextEmbeddingCrudIT extends InferenceBaseRestTest { + + public void testPutE5Small_withNoModelVariant() throws IOException { + // Model downloaded automatically & test infer with no model variant + { + String inferenceEntityId = randomAlphaOfLength(10).toLowerCase(); + putTextEmbeddingModel(inferenceEntityId, TaskType.TEXT_EMBEDDING, noModelIdVariantJsonEntity()); + var models = getTrainedModel("_all"); + assertThat(models.toString(), containsString("deployment_id=" + inferenceEntityId)); + + Map results = inferOnMockService( + inferenceEntityId, + TaskType.TEXT_EMBEDDING, + List.of("hello world", "this is the second document") + ); + assertTrue(((List) ((Map) ((List) results.get("text_embedding")).get(0)).get("embedding")).size() > 1); + // there exists embeddings + assertTrue(((List) results.get("text_embedding")).size() == 2); + // there are two sets of embeddings + deleteTextEmbeddingModel(inferenceEntityId); + } + } + + public void testPutE5Small_withPlatformAgnosticVariant() throws IOException { + String inferenceEntityId = randomAlphaOfLength(10).toLowerCase(); + putTextEmbeddingModel(inferenceEntityId, TaskType.TEXT_EMBEDDING, platformAgnosticModelVariantJsonEntity()); + var models = getTrainedModel("_all"); + assertThat(models.toString(), containsString("deployment_id=" + inferenceEntityId)); + + Map results = inferOnMockService( + inferenceEntityId, + TaskType.TEXT_EMBEDDING, + List.of("hello world", "this is the second document") + ); + assertTrue(((List) ((Map) ((List) results.get("text_embedding")).get(0)).get("embedding")).size() > 1); + // there exists embeddings + assertTrue(((List) results.get("text_embedding")).size() == 2); + // there are two sets of embeddings + deleteTextEmbeddingModel(inferenceEntityId); + } + + public void testPutE5Small_withPlatformSpecificVariant() throws IOException { + String inferenceEntityId = randomAlphaOfLength(10).toLowerCase(); + if ("linux-x86_64".equals(Platforms.PLATFORM_NAME)) { + putTextEmbeddingModel(inferenceEntityId, TaskType.TEXT_EMBEDDING, platformSpecificModelVariantJsonEntity()); + var models = getTrainedModel("_all"); + assertThat(models.toString(), containsString("deployment_id=" + inferenceEntityId)); + + Map results = inferOnMockService( + inferenceEntityId, + TaskType.TEXT_EMBEDDING, + List.of("hello world", "this is the second document") + ); + assertTrue(((List) ((Map) ((List) results.get("text_embedding")).get(0)).get("embedding")).size() > 1); + // there exists embeddings + assertTrue(((List) results.get("text_embedding")).size() == 2); + // there are two sets of embeddings + deleteTextEmbeddingModel(inferenceEntityId); + } else { + expectThrows( + org.elasticsearch.client.ResponseException.class, + () -> putTextEmbeddingModel(inferenceEntityId, TaskType.TEXT_EMBEDDING, platformSpecificModelVariantJsonEntity()) + ); + } + } + + public void testPutE5Small_withFakeModelVariant() { + String inferenceEntityId = randomAlphaOfLength(10).toLowerCase(); + expectThrows( + org.elasticsearch.client.ResponseException.class, + () -> putTextEmbeddingModel(inferenceEntityId, TaskType.TEXT_EMBEDDING, fakeModelVariantJsonEntity()) + ); + + } + + private Map deleteTextEmbeddingModel(String inferenceEntityId) throws IOException { + var endpoint = Strings.format("_inference/%s/%s", "text_embedding", inferenceEntityId); + var request = new Request("DELETE", endpoint); + var response = 
client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } + + private Map putTextEmbeddingModel(String inferenceEntityId, TaskType taskType, String jsonEntity) throws IOException { + var endpoint = Strings.format("_inference/%s/%s", taskType, inferenceEntityId); + var request = new Request("PUT", endpoint); + + request.setJsonEntity(jsonEntity); + var response = client().performRequest(request); + assertOkOrCreated(response); + return entityAsMap(response); + } + + private String noModelIdVariantJsonEntity() { + return """ + { + "service": "text_embedding", + "service_settings": { + "num_allocations": 1, + "num_threads": 1 + } + } + """; + } + + private String platformAgnosticModelVariantJsonEntity() { + return """ + { + "service": "text_embedding", + "service_settings": { + "num_allocations": 1, + "num_threads": 1, + "model_id": ".multilingual-e5-small" + } + } + """; + } + + private String platformSpecificModelVariantJsonEntity() { + return """ + { + "service": "text_embedding", + "service_settings": { + "num_allocations": 1, + "num_threads": 1, + "model_id": ".multilingual-e5-small_linux-x86_64" + } + } + """; + } + + private String fakeModelVariantJsonEntity() { + return """ + { + "service": "text_embedding", + "service_settings": { + "num_allocations": 1, + "num_threads": 1, + "model_id": ".not-a-real-model-variant" + } + } + """; + } +} diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java index 79db30b4b14e8..13172534eaa60 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestInferenceServiceExtension.java @@ -75,11 +75,12 @@ private static Map getTaskSettingsMap(Map settin @Override @SuppressWarnings("unchecked") - public TestServiceModel parseRequestConfig( + public void parseRequestConfig( String modelId, TaskType taskType, Map config, - Set platfromArchitectures + Set platfromArchitectures, + ActionListener parsedModelListener ) { var serviceSettingsMap = (Map) config.remove(ModelConfigurations.SERVICE_SETTINGS); var serviceSettings = TestServiceSettings.fromMap(serviceSettingsMap); @@ -88,7 +89,7 @@ public TestServiceModel parseRequestConfig( var taskSettingsMap = getTaskSettingsMap(config); var taskSettings = TestTaskSettings.fromMap(taskSettingsMap); - return new TestServiceModel(modelId, taskType, name(), serviceSettings, taskSettings, secretSettings); + parsedModelListener.onResponse(new TestServiceModel(modelId, taskType, name(), serviceSettings, taskSettings, secretSettings)); } @Override diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryImplIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryImplIT.java index 614ebee99ae4f..174e9df08143d 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryImplIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/integration/ModelRegistryImplIT.java @@ -26,10 +26,10 @@ import 
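Taken together, the helpers above issue requests of roughly the following shape. The endpoint id is made up, and the response excerpt is inferred from the assertions earlier in this file, so treat both as illustrative:

PUT _inference/text_embedding/my-e5-endpoint
{
  "service": "text_embedding",
  "service_settings": {
    "num_allocations": 1,
    "num_threads": 1,
    "model_id": ".multilingual-e5-small"
  }
}

A subsequent inference call then returns one embedding per input, along the lines of:

{
  "text_embedding": [
    { "embedding": [0.012, -0.034, ...] },
    { "embedding": [0.056, 0.078, ...] }
  ]
}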
org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.InferencePlugin; import org.elasticsearch.xpack.inference.registry.ModelRegistryImpl; -import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeModel; -import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeService; -import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceSettingsTests; -import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceTests; +import org.elasticsearch.xpack.inference.services.elser.ElserInternalModel; +import org.elasticsearch.xpack.inference.services.elser.ElserInternalService; +import org.elasticsearch.xpack.inference.services.elser.ElserInternalServiceSettingsTests; +import org.elasticsearch.xpack.inference.services.elser.ElserInternalServiceTests; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeTaskSettingsTests; import org.junit.Before; @@ -116,8 +116,8 @@ public void testGetModel() throws Exception { assertEquals(model.getConfigurations().getService(), modelHolder.get().service()); - var elserService = new ElserMlNodeService(new InferenceServiceExtension.InferenceServiceFactoryContext(mock(Client.class))); - ElserMlNodeModel roundTripModel = elserService.parsePersistedConfigWithSecrets( + var elserService = new ElserInternalService(new InferenceServiceExtension.InferenceServiceFactoryContext(mock(Client.class))); + ElserInternalModel roundTripModel = elserService.parsePersistedConfigWithSecrets( modelHolder.get().inferenceEntityId(), modelHolder.get().taskType(), modelHolder.get().settings(), @@ -273,7 +273,7 @@ public void testGetModelWithSecrets() throws InterruptedException { } private Model buildElserModelConfig(String inferenceEntityId, TaskType taskType) { - return ElserMlNodeServiceTests.randomModelConfig(inferenceEntityId, taskType); + return ElserInternalServiceTests.randomModelConfig(inferenceEntityId, taskType); } protected void blockingCall(Consumer> function, AtomicReference response, AtomicReference error) @@ -296,8 +296,8 @@ private static Model buildModelWithUnknownField(String inferenceEntityId) { new ModelWithUnknownField( inferenceEntityId, TaskType.SPARSE_EMBEDDING, - ElserMlNodeService.NAME, - ElserMlNodeServiceSettingsTests.createRandom(), + ElserInternalService.NAME, + ElserInternalServiceSettingsTests.createRandom(), ElserMlNodeTaskSettingsTests.createRandom() ) ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java index efde4f28a27e1..f067d7740a2ef 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceNamedWriteablesProvider.java @@ -23,7 +23,7 @@ import org.elasticsearch.xpack.inference.services.cohere.CohereServiceSettings; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsTaskSettings; -import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceSettings; +import org.elasticsearch.xpack.inference.services.elser.ElserInternalServiceSettings; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeTaskSettings; import 
org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserSecretSettings; @@ -31,6 +31,8 @@ import org.elasticsearch.xpack.inference.services.openai.OpenAiServiceSettings; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; +import org.elasticsearch.xpack.inference.services.textembedding.MultilingualE5SmallInternalServiceSettings; +import org.elasticsearch.xpack.inference.services.textembedding.TextEmbeddingInternalServiceSettings; import java.util.ArrayList; import java.util.List; @@ -82,14 +84,30 @@ public static List getNamedWriteables() { // Default secret settings namedWriteables.add(new NamedWriteableRegistry.Entry(SecretSettings.class, DefaultSecretSettings.NAME, DefaultSecretSettings::new)); - // ELSER config + // Internal ELSER config namedWriteables.add( - new NamedWriteableRegistry.Entry(ServiceSettings.class, ElserMlNodeServiceSettings.NAME, ElserMlNodeServiceSettings::new) + new NamedWriteableRegistry.Entry(ServiceSettings.class, ElserInternalServiceSettings.NAME, ElserInternalServiceSettings::new) ); namedWriteables.add( new NamedWriteableRegistry.Entry(TaskSettings.class, ElserMlNodeTaskSettings.NAME, ElserMlNodeTaskSettings::new) ); + // Internal TextEmbedding service config + namedWriteables.add( + new NamedWriteableRegistry.Entry( + ServiceSettings.class, + TextEmbeddingInternalServiceSettings.NAME, + TextEmbeddingInternalServiceSettings::new + ) + ); + namedWriteables.add( + new NamedWriteableRegistry.Entry( + ServiceSettings.class, + MultilingualE5SmallInternalServiceSettings.NAME, + MultilingualE5SmallInternalServiceSettings::new + ) + ); + // Hugging Face config namedWriteables.add( new NamedWriteableRegistry.Entry( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 905a92e899784..2273845c43516 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -60,10 +60,11 @@ import org.elasticsearch.xpack.inference.rest.RestPutInferenceModelAction; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.cohere.CohereService; -import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeService; +import org.elasticsearch.xpack.inference.services.elser.ElserInternalService; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceService; import org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserService; import org.elasticsearch.xpack.inference.services.openai.OpenAiService; +import org.elasticsearch.xpack.inference.services.textembedding.TextEmbeddingInternalService; import java.util.ArrayList; import java.util.Collection; @@ -160,11 +161,12 @@ public void loadExtensions(ExtensionLoader loader) { public List getInferenceServiceFactories() { return List.of( - ElserMlNodeService::new, + ElserInternalService::new, context -> new HuggingFaceElserService(httpFactory, serviceComponents), context -> new HuggingFaceService(httpFactory, serviceComponents), context -> new OpenAiService(httpFactory, serviceComponents), - context -> new 
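For context on the registrations above: NamedWriteableRegistry entries are what let a ServiceSettings implementation be revived from the wire by name. A sketch, where rawIn stands for some incoming StreamInput (assumed for the example):

NamedWriteableRegistry registry = new NamedWriteableRegistry(InferenceNamedWriteablesProvider.getNamedWriteables());
try (StreamInput in = new NamedWriteableAwareStreamInput(rawIn, registry)) {
    // Reads the writeable name first, then dispatches to the matching reader,
    // e.g. MultilingualE5SmallInternalServiceSettings::new registered above.
    ServiceSettings settings = in.readNamedWriteable(ServiceSettings.class);
}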
CohereService(httpFactory, serviceComponents) + context -> new CohereService(httpFactory, serviceComponents), + TextEmbeddingInternalService::new ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java index b2a30a3b7e931..1a6f449383ec0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportPutInferenceModelAction.java @@ -185,21 +185,28 @@ private void parseAndStoreModel( Set platformArchitectures, ActionListener listener ) { - var model = service.parseRequestConfig(inferenceEntityId, taskType, config, platformArchitectures); - - service.checkModelConfig( - model, - listener.delegateFailureAndWrap( - // model is valid good to persist then start - (delegate, verifiedModel) -> modelRegistry.storeModel( - verifiedModel, - delegate.delegateFailureAndWrap((l, r) -> startModel(service, verifiedModel, l)) + ActionListener modelListener = listener.delegateFailureAndWrap((delegate, model) -> { + service.checkModelConfig( + model, + delegate.delegateFailureAndWrap( + // model is valid, ok to persist then start + (delegate2, verifiedModel) -> modelRegistry.storeModel( + verifiedModel, + delegate2.delegateFailureAndWrap((l, r) -> putAndStartModel(service, verifiedModel, l)) + ) ) - ) - ); + ); + }); + + service.parseRequestConfig(inferenceEntityId, taskType, config, platformArchitectures, modelListener); + } - private static void startModel(InferenceService service, Model model, ActionListener finalListener) { + private static void putAndStartModel( + InferenceService service, + Model model, + ActionListener finalListener + ) { SubscribableListener.newForked((listener1) -> { service.putModel(model, listener1); }).< PutInferenceModelAction.Response>andThen((listener2, modelDidPut) -> { if (modelDidPut) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java index 7637bd9740670..bea2100643b19 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceUtils.java @@ -253,6 +253,14 @@ private static > void validateEnumValue(E enumValue, EnumSet diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index c4c83841c5a85..0cccb7ff73989 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -53,30 +53,35 @@ public String name() { } @Override - public CohereModel parseRequestConfig( + public void parseRequestConfig( String inferenceEntityId, TaskType taskType, Map config, - Set platformArchitectures + Set platformArchitectures, + ActionListener parsedModelListener ) { - Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); - Map taskSettingsMap = 
removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); + try { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); - CohereModel model = createModel( - inferenceEntityId, - taskType, - serviceSettingsMap, - taskSettingsMap, - serviceSettingsMap, - TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), - true - ); + CohereModel model = createModel( + inferenceEntityId, + taskType, + serviceSettingsMap, + taskSettingsMap, + serviceSettingsMap, + TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), + true + ); - throwIfNotEmptyMap(config, NAME); - throwIfNotEmptyMap(serviceSettingsMap, NAME); - throwIfNotEmptyMap(taskSettingsMap, NAME); + throwIfNotEmptyMap(config, NAME); + throwIfNotEmptyMap(serviceSettingsMap, NAME); + throwIfNotEmptyMap(taskSettingsMap, NAME); - return model; + parsedModelListener.onResponse(model); + } catch (Exception e) { + parsedModelListener.onFailure(e); + } } private static CohereModel createModelWithoutLoggingDeprecations( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalModel.java similarity index 76% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalModel.java index e23ae76659700..82c0052e16970 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalModel.java @@ -11,21 +11,21 @@ import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.TaskType; -public class ElserMlNodeModel extends Model { +public class ElserInternalModel extends Model { - public ElserMlNodeModel( + public ElserInternalModel( String inferenceEntityId, TaskType taskType, String service, - ElserMlNodeServiceSettings serviceSettings, + ElserInternalServiceSettings serviceSettings, ElserMlNodeTaskSettings taskSettings ) { super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings)); } @Override - public ElserMlNodeServiceSettings getServiceSettings() { - return (ElserMlNodeServiceSettings) super.getServiceSettings(); + public ElserInternalServiceSettings getServiceSettings() { + return (ElserInternalServiceSettings) super.getServiceSettings(); } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java similarity index 76% rename from x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java rename to x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java index 10e7b73a43c28..0229a0eb1c0cb 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalService.java @@ -41,6 +41,7 @@ import 
org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.inference.services.ServiceUtils; import java.io.IOException; import java.util.List; @@ -53,7 +54,7 @@ import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; -public class ElserMlNodeService implements InferenceService { +public class ElserInternalService implements InferenceService { public static final String NAME = "elser"; @@ -62,15 +63,17 @@ public class ElserMlNodeService implements InferenceService { static final String ELSER_V2_MODEL = ".elser_model_2"; static final String ELSER_V2_MODEL_LINUX_X86 = ".elser_model_2_linux-x86_64"; - public static Set VALID_ELSER_MODELS = Set.of( - ElserMlNodeService.ELSER_V1_MODEL, - ElserMlNodeService.ELSER_V2_MODEL, - ElserMlNodeService.ELSER_V2_MODEL_LINUX_X86 + public static Set VALID_ELSER_MODEL_IDS = Set.of( + ElserInternalService.ELSER_V1_MODEL, + ElserInternalService.ELSER_V2_MODEL, + ElserInternalService.ELSER_V2_MODEL_LINUX_X86 ); + private static final String OLD_MODEL_ID_FIELD_NAME = "model_version"; + private final OriginSettingClient client; - public ElserMlNodeService(InferenceServiceExtension.InferenceServiceFactoryContext context) { + public ElserInternalService(InferenceServiceExtension.InferenceServiceFactoryContext context) { this.client = new OriginSettingClient(context.client(), ClientHelper.INFERENCE_ORIGIN); } @@ -79,38 +82,51 @@ public boolean isInClusterService() { } @Override - public ElserMlNodeModel parseRequestConfig( + public void parseRequestConfig( String inferenceEntityId, TaskType taskType, Map config, - Set modelArchitectures + Set modelArchitectures, + ActionListener parsedModelListener ) { - Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); - var serviceSettingsBuilder = ElserMlNodeServiceSettings.fromMap(serviceSettingsMap); + try { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + var serviceSettingsBuilder = ElserInternalServiceSettings.fromMap(serviceSettingsMap); - if (serviceSettingsBuilder.getModelVariant() == null) { - serviceSettingsBuilder.setModelVariant(selectDefaultModelVersionBasedOnClusterArchitecture(modelArchitectures)); - } + if (serviceSettingsBuilder.getModelId() == null) { + serviceSettingsBuilder.setModelId(selectDefaultModelVersionBasedOnClusterArchitecture(modelArchitectures)); + } - Map taskSettingsMap; - // task settings are optional - if (config.containsKey(ModelConfigurations.TASK_SETTINGS)) { - taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); - } else { - taskSettingsMap = Map.of(); - } + Map taskSettingsMap; + // task settings are optional + if (config.containsKey(ModelConfigurations.TASK_SETTINGS)) { + taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); + } else { + taskSettingsMap = Map.of(); + } - var taskSettings = taskSettingsFromMap(taskType, taskSettingsMap); + var taskSettings = taskSettingsFromMap(taskType, taskSettingsMap); - throwIfNotEmptyMap(config, NAME); - throwIfNotEmptyMap(serviceSettingsMap, NAME); - throwIfNotEmptyMap(taskSettingsMap, NAME); + throwIfNotEmptyMap(config, NAME); + 
throwIfNotEmptyMap(serviceSettingsMap, NAME); + throwIfNotEmptyMap(taskSettingsMap, NAME); - return new ElserMlNodeModel(inferenceEntityId, taskType, NAME, serviceSettingsBuilder.build(), taskSettings); + parsedModelListener.onResponse( + new ElserInternalModel( + inferenceEntityId, + taskType, + NAME, + (ElserInternalServiceSettings) serviceSettingsBuilder.build(), + taskSettings + ) + ); + } catch (Exception e) { + parsedModelListener.onFailure(e); + } } private static String selectDefaultModelVersionBasedOnClusterArchitecture(Set modelArchitectures) { - // choose a default model version based on the cluster architecture + // choose a default model ID based on the cluster architecture boolean homogenous = modelArchitectures.size() == 1; if (homogenous && modelArchitectures.iterator().next().equals("linux-x86_64")) { // Use the hardware optimized model @@ -122,7 +138,7 @@ private static String selectDefaultModelVersionBasedOnClusterArchitecture(Set config, @@ -132,9 +148,17 @@ public ElserMlNodeModel parsePersistedConfigWithSecrets( } @Override - public ElserMlNodeModel parsePersistedConfig(String inferenceEntityId, TaskType taskType, Map config) { + public ElserInternalModel parsePersistedConfig(String inferenceEntityId, TaskType taskType, Map config) { Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); - var serviceSettingsBuilder = ElserMlNodeServiceSettings.fromMap(serviceSettingsMap); + + // Change from old model_version field name to new model_id field name as of + // TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED + if (serviceSettingsMap.containsKey(OLD_MODEL_ID_FIELD_NAME)) { + String modelId = ServiceUtils.removeAsType(serviceSettingsMap, OLD_MODEL_ID_FIELD_NAME, String.class); + serviceSettingsMap.put(ElserInternalServiceSettings.MODEL_ID, modelId); + } + + var serviceSettingsBuilder = ElserInternalServiceSettings.fromMap(serviceSettingsMap); Map taskSettingsMap; // task settings are optional @@ -146,15 +170,21 @@ public ElserMlNodeModel parsePersistedConfig(String inferenceEntityId, TaskType var taskSettings = taskSettingsFromMap(taskType, taskSettingsMap); - return new ElserMlNodeModel(inferenceEntityId, taskType, NAME, serviceSettingsBuilder.build(), taskSettings); + return new ElserInternalModel( + inferenceEntityId, + taskType, + NAME, + (ElserInternalServiceSettings) serviceSettingsBuilder.build(), + taskSettings + ); } @Override public void start(Model model, ActionListener listener) { - if (model instanceof ElserMlNodeModel == false) { + if (model instanceof ElserInternalModel == false) { listener.onFailure( new IllegalStateException( - "Error starting model, [" + model.getConfigurations().getInferenceEntityId() + "] is not an elser model" + "Error starting model, [" + model.getConfigurations().getInferenceEntityId() + "] is not an ELSER model" ) ); return; @@ -167,22 +197,24 @@ public void start(Model model, ActionListener listener) { return; } - var elserModel = (ElserMlNodeModel) model; + client.execute(StartTrainedModelDeploymentAction.INSTANCE, startDeploymentRequest(model), elserNotDownloadedListener(listener)); + } + + private static StartTrainedModelDeploymentAction.Request startDeploymentRequest(Model model) { + var elserModel = (ElserInternalModel) model; var serviceSettings = elserModel.getServiceSettings(); var startRequest = new StartTrainedModelDeploymentAction.Request( - serviceSettings.getModelVariant(), + serviceSettings.getModelId(), model.getConfigurations().getInferenceEntityId() ); 
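For a quick check of the selection logic above, these are the outcomes for a few example clusters (the non-linux architecture name is illustrative):

// {"linux-x86_64"}                    -> ".elser_model_2_linux-x86_64" (homogeneous, optimized build)
// {"linux-x86_64", "darwin-aarch64"}  -> ".elser_model_2"              (mixed cluster, portable build)
// {"darwin-aarch64"}                  -> ".elser_model_2"              (no optimized build for this platform)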
startRequest.setNumberOfAllocations(serviceSettings.getNumAllocations()); startRequest.setThreadsPerAllocation(serviceSettings.getNumThreads()); startRequest.setWaitForState(STARTED); - - client.execute(StartTrainedModelDeploymentAction.INSTANCE, startRequest, elserNotDownloadedListener(model, listener)); + return startRequest; } private static ActionListener elserNotDownloadedListener( - Model model, ActionListener listener ) { return new ActionListener<>() { @@ -197,7 +229,7 @@ public void onFailure(Exception e) { listener.onFailure( new ResourceNotFoundException( "Could not start the ELSER service as the ELSER model for this platform cannot be found." - + " ELSER needs to be downloaded before it can be started" + + " ELSER needs to be downloaded before it can be started." ) ); return; @@ -289,18 +321,18 @@ private void checkCompatibleTaskType(TaskType taskType) { @Override public void putModel(Model model, ActionListener listener) { - if (model instanceof ElserMlNodeModel == false) { + if (model instanceof ElserInternalModel == false) { listener.onFailure( new IllegalStateException( - "Error starting model, [" + model.getConfigurations().getInferenceEntityId() + "] is not an elser model" + "Error starting model, [" + model.getConfigurations().getInferenceEntityId() + "] is not an ELSER model" ) ); return; } else { - String modelVariant = ((ElserMlNodeModel) model).getServiceSettings().getModelVariant(); + String modelId = ((ElserInternalModel) model).getServiceSettings().getModelId(); var fieldNames = List.of(); var input = new TrainedModelInput(fieldNames); - var config = TrainedModelConfig.builder().setInput(input).setModelId(modelVariant).build(); + var config = TrainedModelConfig.builder().setInput(input).setModelId(modelId).build(); PutTrainedModelAction.Request putRequest = new PutTrainedModelAction.Request(config, false, true); executeAsyncWithOrigin( client, diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceSettings.java new file mode 100644 index 0000000000000..3f345a4410091 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceSettings.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.elser; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.inference.services.ServiceUtils; +import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +public class ElserInternalServiceSettings extends InternalServiceSettings { + + public static final String NAME = "elser_mlnode_service_settings"; + + /** + * Parse the Elser service setting from map and validate the setting values. + * + * If required setting are missing or the values are invalid an + * {@link ValidationException} is thrown. 
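A hedged usage sketch for the parser documented here (its body follows below). The keys mirror the service_settings JSON used elsewhere in this PR, and the map must be mutable because removeAsType strips entries as it reads them:

Map<String, Object> settingsMap = new HashMap<>();
settingsMap.put("num_allocations", 1);
settingsMap.put("num_threads", 1);
settingsMap.put("model_id", ".elser_model_2");
// The cast mirrors the call sites in ElserInternalService above: the builder's
// static type is InternalServiceSettings.Builder, but this subclass's build()
// produces an ElserInternalServiceSettings.
var settings = (ElserInternalServiceSettings) ElserInternalServiceSettings.fromMap(settingsMap).build();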
+ * + * @param map Source map containing the config + * @return The {@code ElserInternalServiceSettings} + */ + public static ElserInternalServiceSettings.Builder fromMap(Map map) { + ValidationException validationException = new ValidationException(); + Integer numAllocations = ServiceUtils.removeAsType(map, NUM_ALLOCATIONS, Integer.class); + Integer numThreads = ServiceUtils.removeAsType(map, NUM_THREADS, Integer.class); + + validateParameters(numAllocations, validationException, numThreads); + + String model_id = ServiceUtils.removeAsType(map, MODEL_ID, String.class); + if (model_id != null && ElserInternalService.VALID_ELSER_MODEL_IDS.contains(model_id) == false) { + validationException.addValidationError("unknown ELSER model id [" + model_id + "]"); + } + + if (validationException.validationErrors().isEmpty() == false) { + throw validationException; + } + + var builder = new InternalServiceSettings.Builder() { + @Override + public ElserInternalServiceSettings build() { + return new ElserInternalServiceSettings(getNumAllocations(), getNumThreads(), getModelId()); + } + }; + builder.setNumAllocations(numAllocations); + builder.setNumThreads(numThreads); + builder.setModelId(model_id); + return builder; + } + + public ElserInternalServiceSettings(int numAllocations, int numThreads, String modelId) { + super(numAllocations, numThreads, modelId); + Objects.requireNonNull(modelId); + } + + public ElserInternalServiceSettings(StreamInput in) throws IOException { + super( + in.readVInt(), + in.readVInt(), + transportVersionIsCompatibleWithElserModelVersion(in.getTransportVersion()) + ? in.readString() + : ElserInternalService.ELSER_V2_MODEL + ); + } + + static boolean transportVersionIsCompatibleWithElserModelVersion(TransportVersion transportVersion) { + var nextNonPatchVersion = TransportVersions.PLUGIN_DESCRIPTOR_OPTIONAL_CLASSNAME; + + if (transportVersion.onOrAfter(TransportVersions.ELSER_SERVICE_MODEL_VERSION_ADDED)) { + return true; + } else { + return transportVersion.onOrAfter(TransportVersions.V_8_11_X) && transportVersion.before(nextNonPatchVersion); + } + } + + @Override + public String getWriteableName() { + return ElserInternalServiceSettings.NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.V_8_11_X; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(getNumAllocations()); + out.writeVInt(getNumThreads()); + if (transportVersionIsCompatibleWithElserModelVersion(out.getTransportVersion())) { + out.writeString(getModelId()); + } + } + + @Override + public int hashCode() { + return Objects.hash(NAME, getNumAllocations(), getNumThreads(), getModelId()); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ElserInternalServiceSettings that = (ElserInternalServiceSettings) o; + return getNumAllocations() == that.getNumAllocations() + && getNumThreads() == that.getNumThreads() + && Objects.equals(getModelId(), that.getModelId()); + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java deleted file mode 100644 index 91edf4a2de09c..0000000000000 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettings.java +++ /dev/null @@ -1,188 
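The compatibility helper above encodes a backport window: the model id travels either to peers at or after ELSER_SERVICE_MODEL_VERSION_ADDED, or to 8.11 patch releases before the next non-patch version. A hypothetical assertion-style summary of the three cases, assuming the usual ordering of these version constants:

// Feature version and everything after it: compatible.
assert transportVersionIsCompatibleWithElserModelVersion(TransportVersions.ELSER_SERVICE_MODEL_VERSION_ADDED);
// 8.11 patch releases, where the field was backported: compatible.
assert transportVersionIsCompatibleWithElserModelVersion(TransportVersions.V_8_11_X);
// Versions after the 8.11 patch window but before the feature version: not compatible.
assert transportVersionIsCompatibleWithElserModelVersion(TransportVersions.PLUGIN_DESCRIPTOR_OPTIONAL_CLASSNAME) == false;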
+0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.inference.services.elser; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.common.ValidationException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.ServiceSettings; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.inference.services.ServiceUtils; - -import java.io.IOException; -import java.util.Map; -import java.util.Objects; - -public class ElserMlNodeServiceSettings implements ServiceSettings { - - public static final String NAME = "elser_mlnode_service_settings"; - public static final String NUM_ALLOCATIONS = "num_allocations"; - public static final String NUM_THREADS = "num_threads"; - public static final String MODEL_VERSION = "model_version"; - - private final int numAllocations; - private final int numThreads; - private final String modelVariant; - - /** - * Parse the Elser service setting from map and validate the setting values. - * - * If required setting are missing or the values are invalid an - * {@link ValidationException} is thrown. - * - * @param map Source map containg the config - * @return The {@code ElserMlNodeServiceSettings} - */ - public static ElserMlNodeServiceSettings.Builder fromMap(Map map) { - ValidationException validationException = new ValidationException(); - Integer numAllocations = ServiceUtils.removeAsType(map, NUM_ALLOCATIONS, Integer.class); - Integer numThreads = ServiceUtils.removeAsType(map, NUM_THREADS, Integer.class); - - if (numAllocations == null) { - validationException.addValidationError( - ServiceUtils.missingSettingErrorMsg(NUM_ALLOCATIONS, ModelConfigurations.SERVICE_SETTINGS) - ); - } else if (numAllocations < 1) { - validationException.addValidationError(mustBeAPositiveNumberError(NUM_ALLOCATIONS, numAllocations)); - } - - if (numThreads == null) { - validationException.addValidationError(ServiceUtils.missingSettingErrorMsg(NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS)); - } else if (numThreads < 1) { - validationException.addValidationError(mustBeAPositiveNumberError(NUM_THREADS, numThreads)); - } - - String version = ServiceUtils.removeAsType(map, MODEL_VERSION, String.class); - if (version != null && ElserMlNodeService.VALID_ELSER_MODELS.contains(version) == false) { - validationException.addValidationError("unknown ELSER model version [" + version + "]"); - } - - if (validationException.validationErrors().isEmpty() == false) { - throw validationException; - } - - var builder = new Builder(); - builder.setNumAllocations(numAllocations); - builder.setNumThreads(numThreads); - builder.setModelVariant(version); - return builder; - } - - public ElserMlNodeServiceSettings(int numAllocations, int numThreads, String variant) { - this.numAllocations = numAllocations; - this.numThreads = numThreads; - this.modelVariant = Objects.requireNonNull(variant); - } - - public ElserMlNodeServiceSettings(StreamInput in) throws IOException { - numAllocations = in.readVInt(); - numThreads = in.readVInt(); - if (transportVersionIsCompatibleWithElserModelVersion(in.getTransportVersion())) { - 
modelVariant = in.readString(); - } else { - modelVariant = ElserMlNodeService.ELSER_V2_MODEL; - } - } - - static boolean transportVersionIsCompatibleWithElserModelVersion(TransportVersion transportVersion) { - var nextNonPatchVersion = TransportVersions.PLUGIN_DESCRIPTOR_OPTIONAL_CLASSNAME; - - if (transportVersion.onOrAfter(TransportVersions.ELSER_SERVICE_MODEL_VERSION_ADDED)) { - return true; - } else { - return transportVersion.onOrAfter(TransportVersions.V_8_11_X) && transportVersion.before(nextNonPatchVersion); - } - } - - public int getNumAllocations() { - return numAllocations; - } - - public int getNumThreads() { - return numThreads; - } - - public String getModelVariant() { - return modelVariant; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(NUM_ALLOCATIONS, numAllocations); - builder.field(NUM_THREADS, numThreads); - builder.field(MODEL_VERSION, modelVariant); - builder.endObject(); - return builder; - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_11_X; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(numAllocations); - out.writeVInt(numThreads); - if (transportVersionIsCompatibleWithElserModelVersion(out.getTransportVersion())) { - out.writeString(modelVariant); - } - } - - @Override - public int hashCode() { - return Objects.hash(numAllocations, numThreads, modelVariant); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ElserMlNodeServiceSettings that = (ElserMlNodeServiceSettings) o; - return numAllocations == that.numAllocations && numThreads == that.numThreads && Objects.equals(modelVariant, that.modelVariant); - } - - private static String mustBeAPositiveNumberError(String settingName, int value) { - return "Invalid value [" + value + "]. 
[" + settingName + "] must be a positive integer"; - } - - public static class Builder { - private int numAllocations; - private int numThreads; - private String modelVariant; - - public void setNumAllocations(int numAllocations) { - this.numAllocations = numAllocations; - } - - public void setNumThreads(int numThreads) { - this.numThreads = numThreads; - } - - public void setModelVariant(String modelVariant) { - this.modelVariant = modelVariant; - } - - public String getModelVariant() { - return modelVariant; - } - - public ElserMlNodeServiceSettings build() { - return new ElserMlNodeServiceSettings(numAllocations, numThreads, modelVariant); - } - } -} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java index 5a57699e03c10..b592267b7971d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseService.java @@ -38,26 +38,31 @@ public HuggingFaceBaseService(SetOnce factory, SetOnce } @Override - public HuggingFaceModel parseRequestConfig( + public void parseRequestConfig( String inferenceEntityId, TaskType taskType, Map config, - Set platformArchitectures + Set platformArchitectures, + ActionListener parsedModelListener ) { - Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); - - var model = createModel( - inferenceEntityId, - taskType, - serviceSettingsMap, - serviceSettingsMap, - TaskType.unsupportedTaskTypeErrorMsg(taskType, name()) - ); - - throwIfNotEmptyMap(config, name()); - throwIfNotEmptyMap(serviceSettingsMap, name()); - - return model; + try { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + + var model = createModel( + inferenceEntityId, + taskType, + serviceSettingsMap, + serviceSettingsMap, + TaskType.unsupportedTaskTypeErrorMsg(taskType, name()) + ); + + throwIfNotEmptyMap(config, name()); + throwIfNotEmptyMap(serviceSettingsMap, name()); + + parsedModelListener.onResponse(model); + } catch (Exception e) { + parsedModelListener.onFailure(e); + } } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 8071513b817c9..9fe5dba2ac030 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -52,30 +52,35 @@ public String name() { } @Override - public OpenAiModel parseRequestConfig( + public void parseRequestConfig( String inferenceEntityId, TaskType taskType, Map config, - Set platformArchitectures + Set platformArchitectures, + ActionListener parsedModelListener ) { - Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); - Map taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS); + try { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + Map taskSettingsMap = removeFromMapOrThrowIfNull(config, 
ModelConfigurations.TASK_SETTINGS); - OpenAiModel model = createModel( - inferenceEntityId, - taskType, - serviceSettingsMap, - taskSettingsMap, - serviceSettingsMap, - TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), - true - ); + OpenAiModel model = createModel( + inferenceEntityId, + taskType, + serviceSettingsMap, + taskSettingsMap, + serviceSettingsMap, + TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), + true + ); - throwIfNotEmptyMap(config, NAME); - throwIfNotEmptyMap(serviceSettingsMap, NAME); - throwIfNotEmptyMap(taskSettingsMap, NAME); + throwIfNotEmptyMap(config, NAME); + throwIfNotEmptyMap(serviceSettingsMap, NAME); + throwIfNotEmptyMap(taskSettingsMap, NAME); - return model; + parsedModelListener.onResponse(model); + } catch (Exception e) { + parsedModelListener.onFailure(e); + } } private static OpenAiModel createModelWithoutLoggingDeprecations( diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/InternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/InternalServiceSettings.java new file mode 100644 index 0000000000000..ca68bc7d29c1f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/InternalServiceSettings.java @@ -0,0 +1,129 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.settings; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.ServiceSettings; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.inference.services.ServiceUtils; + +import java.io.IOException; +import java.util.Objects; + +public abstract class InternalServiceSettings implements ServiceSettings { + + public static final String NUM_ALLOCATIONS = "num_allocations"; + public static final String NUM_THREADS = "num_threads"; + public static final String MODEL_ID = "model_id"; + + private final int numAllocations; + private final int numThreads; + private final String modelId; + + public InternalServiceSettings(int numAllocations, int numThreads, String modelId) { + this.numAllocations = numAllocations; + this.numThreads = numThreads; + this.modelId = modelId; + } + + protected static void validateParameters(Integer numAllocations, ValidationException validationException, Integer numThreads) { + if (numAllocations == null) { + validationException.addValidationError( + ServiceUtils.missingSettingErrorMsg(NUM_ALLOCATIONS, ModelConfigurations.SERVICE_SETTINGS) + ); + } else if (numAllocations < 1) { + validationException.addValidationError(ServiceUtils.mustBeAPositiveNumberErrorMessage(NUM_ALLOCATIONS, numAllocations)); + } + + if (numThreads == null) { + validationException.addValidationError(ServiceUtils.missingSettingErrorMsg(NUM_THREADS, ModelConfigurations.SERVICE_SETTINGS)); + } else if (numThreads < 1) { + validationException.addValidationError(ServiceUtils.mustBeAPositiveNumberErrorMessage(NUM_THREADS, numThreads)); + } + } + + public int getNumAllocations() { + return numAllocations; + } + + public int getNumThreads() { + return numThreads; + } + + public String getModelId() 
{
+        return modelId;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        InternalServiceSettings that = (InternalServiceSettings) o;
+        return numAllocations == that.numAllocations && numThreads == that.numThreads && Objects.equals(modelId, that.modelId);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(numAllocations, numThreads, modelId);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(NUM_ALLOCATIONS, getNumAllocations());
+        builder.field(NUM_THREADS, getNumThreads());
+        builder.field(MODEL_ID, getModelId());
+        builder.endObject();
+        return builder;
+    }
+
+    @Override
+    public boolean isFragment() {
+        return ServiceSettings.super.isFragment();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeVInt(getNumAllocations());
+        out.writeVInt(getNumThreads());
+        out.writeString(getModelId());
+    }
+
+    public abstract static class Builder {
+        private int numAllocations;
+        private int numThreads;
+        private String modelId;
+
+        public abstract InternalServiceSettings build();
+
+        public void setNumAllocations(int numAllocations) {
+            this.numAllocations = numAllocations;
+        }
+
+        public void setNumThreads(int numThreads) {
+            this.numThreads = numThreads;
+        }
+
+        public void setModelId(String modelId) {
+            this.modelId = modelId;
+        }
+
+        public String getModelId() {
+            return modelId;
+        }
+
+        public int getNumAllocations() {
+            return numAllocations;
+        }
+
+        public int getNumThreads() {
+            return numThreads;
+        }
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/CustomElandInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/CustomElandInternalServiceSettings.java
new file mode 100644
index 0000000000000..49cf3fdcd9e89
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/CustomElandInternalServiceSettings.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ *
+ * this file was contributed to by a generative AI
+ */
+
+package org.elasticsearch.xpack.inference.services.textembedding;
+
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.common.ValidationException;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.inference.services.ServiceUtils;
+
+import java.io.IOException;
+import java.util.Map;
+
+import static org.elasticsearch.TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED;
+
+public class CustomElandInternalServiceSettings extends TextEmbeddingInternalServiceSettings {
+
+    public static final String NAME = "custom_eland_model_internal_service_settings";
+
+    public CustomElandInternalServiceSettings(int numAllocations, int numThreads, String modelId) {
+        super(numAllocations, numThreads, modelId);
+    }
+
+    /**
+     * Parse the CustomElandServiceSettings from map and validate the setting values.
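+     * Consumed settings are removed from the source map, so callers can detect
+     * and reject any unknown keys that remain afterwards.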
+     *
+     * This method does not verify the model variant.
+     *
+     * If required settings are missing or the values are invalid, a
+     * {@link ValidationException} is thrown.
+     *
+     * @param map Source map containing the config
+     * @return The {@code CustomElandServiceSettings} builder
+     */
+    public static Builder fromMap(Map<String, Object> map) {
+
+        ValidationException validationException = new ValidationException();
+        Integer numAllocations = ServiceUtils.removeAsType(map, NUM_ALLOCATIONS, Integer.class);
+        Integer numThreads = ServiceUtils.removeAsType(map, NUM_THREADS, Integer.class);
+
+        validateParameters(numAllocations, validationException, numThreads);
+
+        String modelId = ServiceUtils.extractRequiredString(map, MODEL_ID, "ServiceSettings", validationException);
+
+        if (validationException.validationErrors().isEmpty() == false) {
+            throw validationException;
+        }
+
+        var builder = new Builder() {
+            @Override
+            public CustomElandInternalServiceSettings build() {
+                return new CustomElandInternalServiceSettings(getNumAllocations(), getNumThreads(), getModelId());
+            }
+        };
+        builder.setNumAllocations(numAllocations);
+        builder.setNumThreads(numThreads);
+        builder.setModelId(modelId);
+        return builder;
+    }
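+
+    // A minimal usage sketch (the map keys mirror the service_settings JSON;
+    // "my-custom-model" is a hypothetical model that must already have been
+    // loaded into the cluster with eland):
+    //
+    //   Map<String, Object> map = new HashMap<>(
+    //       Map.of("num_allocations", 1, "num_threads", 2, "model_id", "my-custom-model")
+    //   );
+    //   var settings = (CustomElandInternalServiceSettings) fromMap(map).build();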
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        return super.toXContent(builder, params);
+    }
+
+    public CustomElandInternalServiceSettings(StreamInput in) throws IOException {
+        super(in.readVInt(), in.readVInt(), in.readString());
+    }
+
+    @Override
+    public boolean isFragment() {
+        return super.isFragment();
+    }
+
+    @Override
+    public String getWriteableName() {
+        return CustomElandInternalServiceSettings.NAME;
+    }
+
+    @Override
+    public TransportVersion getMinimalSupportedVersion() {
+        return ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/CustomElandModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/CustomElandModel.java
new file mode 100644
index 0000000000000..5d7b63431841f
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/CustomElandModel.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.services.textembedding;
+
+import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.inference.Model;
+import org.elasticsearch.inference.TaskType;
+import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction;
+import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction;
+import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
+
+import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.STARTED;
+
+public class CustomElandModel extends TextEmbeddingModel {
+
+    public CustomElandModel(
+        String inferenceEntityId,
+        TaskType taskType,
+        String service,
+        CustomElandInternalServiceSettings serviceSettings
+    ) {
+        super(inferenceEntityId, taskType, service, serviceSettings);
+    }
+
+    @Override
+    public CustomElandInternalServiceSettings getServiceSettings() {
+        return (CustomElandInternalServiceSettings) super.getServiceSettings();
+    }
+
+    @Override
+    StartTrainedModelDeploymentAction.Request getStartTrainedModelDeploymentActionRequest() {
+        var startRequest = new StartTrainedModelDeploymentAction.Request(
+            this.getServiceSettings().getModelId(),
+            this.getInferenceEntityId()
+        );
+        startRequest.setNumberOfAllocations(this.getServiceSettings().getNumAllocations());
+        startRequest.setThreadsPerAllocation(this.getServiceSettings().getNumThreads());
+        startRequest.setWaitForState(STARTED);
+
+        return startRequest;
+    }
+
+    @Override
+    ActionListener<CreateTrainedModelAssignmentAction.Response> getCreateTrainedModelAssignmentActionListener(
+        Model model,
+        ActionListener<Boolean> listener
+    ) {
+
+        return new ActionListener<>() {
+            @Override
+            public void onResponse(CreateTrainedModelAssignmentAction.Response response) {
+                listener.onResponse(Boolean.TRUE);
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) {
+                    listener.onFailure(
+                        new ResourceNotFoundException(
+                            "Could not start the TextEmbeddingService service as the "
+                                + "custom eland model [{}] for this platform cannot be found."
+                                + " Custom models need to be loaded into the cluster with eland before they can be started.",
+                            getServiceSettings().getModelId()
+                        )
+                    );
+                    return;
+                }
+                listener.onFailure(e);
+            }
+        };
+    }
+
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/MultilingualE5SmallInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/MultilingualE5SmallInternalServiceSettings.java
new file mode 100644
index 0000000000000..cab9d9d863885
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/MultilingualE5SmallInternalServiceSettings.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ *
+ * this file was contributed to by a generative AI
+ */
+
+package org.elasticsearch.xpack.inference.services.textembedding;
+
+import org.elasticsearch.TransportVersion;
+import org.elasticsearch.common.ValidationException;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.inference.services.ServiceUtils;
+import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Map;
+
+import static org.elasticsearch.TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED;
+
+public class MultilingualE5SmallInternalServiceSettings extends TextEmbeddingInternalServiceSettings {
+
+    public static final String NAME = "multilingual_e5_small_service_settings";
+
+    public MultilingualE5SmallInternalServiceSettings(int numAllocations, int numThreads, String modelId) {
+        super(numAllocations, numThreads, modelId);
+    }
+
+    public MultilingualE5SmallInternalServiceSettings(StreamInput in) throws IOException {
+        super(in.readVInt(), in.readVInt(), in.readString());
+    }
+
+    /**
+     * Parse the MultilingualE5SmallServiceSettings from map and validate the setting values.
+     *
+     * If required settings are missing or the values are invalid, a
+     * {@link ValidationException} is thrown.
+     *
+     * @param map Source map containing the config
+     * @return The {@code MultilingualE5SmallServiceSettings} builder
+     */
+    public static MultilingualE5SmallInternalServiceSettings.Builder fromMap(Map<String, Object> map) {
+        ValidationException validationException = new ValidationException();
+        Integer numAllocations = ServiceUtils.removeAsType(map, NUM_ALLOCATIONS, Integer.class);
+        Integer numThreads = ServiceUtils.removeAsType(map, NUM_THREADS, Integer.class);
+
+        validateParameters(numAllocations, validationException, numThreads);
+
+        String modelId = ServiceUtils.removeAsType(map, MODEL_ID, String.class);
+        if (modelId != null) {
+            if (TextEmbeddingInternalService.MULTILINGUAL_E5_SMALL_VALID_IDS.contains(modelId) == false) {
+                validationException.addValidationError(
+                    "unknown Multilingual-E5-Small model ID ["
+                        + modelId
+                        + "]. Valid IDs are "
+                        + Arrays.toString(TextEmbeddingInternalService.MULTILINGUAL_E5_SMALL_VALID_IDS.toArray())
+                );
+            }
+        }
+
+        if (validationException.validationErrors().isEmpty() == false) {
+            throw validationException;
+        }
+
+        var builder = new InternalServiceSettings.Builder() {
+            @Override
+            public MultilingualE5SmallInternalServiceSettings build() {
+                return new MultilingualE5SmallInternalServiceSettings(getNumAllocations(), getNumThreads(), getModelId());
+            }
+        };
+        builder.setNumAllocations(numAllocations);
+        builder.setNumThreads(numThreads);
+        builder.setModelId(modelId);
+        return builder;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        return super.toXContent(builder, params);
+    }
+
+    @Override
+    public boolean isFragment() {
+        return super.isFragment();
+    }
+
+    @Override
+    public String getWriteableName() {
+        return MultilingualE5SmallInternalServiceSettings.NAME;
+    }
+
+    @Override
+    public TransportVersion getMinimalSupportedVersion() {
+        return ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/MultilingualE5SmallModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/MultilingualE5SmallModel.java
new file mode 100644
index 0000000000000..4d3c15f00ea71
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/MultilingualE5SmallModel.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ * + * this file was contributed to by a generative AI + */ + +package org.elasticsearch.xpack.inference.services.textembedding; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; +import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import static org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus.State.STARTED; + +public class MultilingualE5SmallModel extends TextEmbeddingModel { + + public MultilingualE5SmallModel( + String inferenceEntityId, + TaskType taskType, + String service, + MultilingualE5SmallInternalServiceSettings serviceSettings + ) { + super(inferenceEntityId, taskType, service, serviceSettings); + } + + @Override + public MultilingualE5SmallInternalServiceSettings getServiceSettings() { + return (MultilingualE5SmallInternalServiceSettings) super.getServiceSettings(); + } + + @Override + StartTrainedModelDeploymentAction.Request getStartTrainedModelDeploymentActionRequest() { + var startRequest = new StartTrainedModelDeploymentAction.Request( + this.getServiceSettings().getModelId(), + this.getInferenceEntityId() + ); + startRequest.setNumberOfAllocations(this.getServiceSettings().getNumAllocations()); + startRequest.setThreadsPerAllocation(this.getServiceSettings().getNumThreads()); + startRequest.setWaitForState(STARTED); + + return startRequest; + } + + @Override + ActionListener getCreateTrainedModelAssignmentActionListener( + Model model, + ActionListener listener + ) { + + return new ActionListener<>() { + @Override + public void onResponse(CreateTrainedModelAssignmentAction.Response response) { + listener.onResponse(Boolean.TRUE); + } + + @Override + public void onFailure(Exception e) { + if (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) { + listener.onFailure( + new ResourceNotFoundException( + "Could not start the TextEmbeddingService service as the " + + "Multilingual-E5-Small model for this platform cannot be found." + + " Multilingual-E5-Small needs to be downloaded before it can be started" + ) + ); + return; + } + listener.onFailure(e); + } + }; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingInternalService.java new file mode 100644 index 0000000000000..3fe8c1d0df694 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingInternalService.java @@ -0,0 +1,397 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.textembedding; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.inference.ChunkedInferenceServiceResults; +import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.InferenceResults; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceExtension; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.InputType; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.inference.results.ChunkedTextEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; +import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; +import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; +import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; +import org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextExpansionResults; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextEmbeddingConfigUpdate; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextExpansionConfigUpdate; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TokenizationConfigUpdate; +import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.core.ClientHelper.INFERENCE_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.removeFromMapOrThrowIfNull; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.throwIfNotEmptyMap; +import static org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings.MODEL_ID; + +public class TextEmbeddingInternalService implements InferenceService { + + public static final String NAME = "text_embedding"; + + static final String MULTILINGUAL_E5_SMALL_MODEL_ID = ".multilingual-e5-small"; + static final String MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 = ".multilingual-e5-small_linux-x86_64"; + public static final Set MULTILINGUAL_E5_SMALL_VALID_IDS = Set.of( + MULTILINGUAL_E5_SMALL_MODEL_ID, + MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86 + ); + + private final OriginSettingClient client; + + private static final Logger logger = LogManager.getLogger(TextEmbeddingInternalService.class); + + public TextEmbeddingInternalService(InferenceServiceExtension.InferenceServiceFactoryContext context) { + this.client = new 
OriginSettingClient(context.client(), ClientHelper.INFERENCE_ORIGIN); + } + + @Override + public void parseRequestConfig( + String inferenceEntityId, + TaskType taskType, + Map config, + Set platformArchitectures, + ActionListener modelListener + ) { + try { + Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); + String modelId = (String) serviceSettingsMap.get(MODEL_ID); + if (modelId == null) { + throw new IllegalArgumentException("Error parsing request config, model id is missing"); + } + if (MULTILINGUAL_E5_SMALL_VALID_IDS.contains(modelId)) { + e5Case(inferenceEntityId, taskType, config, platformArchitectures, serviceSettingsMap, modelListener); + } else { + throwIfNotEmptyMap(config, name()); + customElandCase(inferenceEntityId, taskType, serviceSettingsMap, modelListener); + } + } catch (Exception e) { + modelListener.onFailure(e); + } + } + + private void customElandCase( + String inferenceEntityId, + TaskType taskType, + Map serviceSettingsMap, + ActionListener modelListener + ) { + String modelId = (String) serviceSettingsMap.get(MODEL_ID); + var request = new GetTrainedModelsAction.Request(modelId); + + var getModelsListener = modelListener.delegateFailureAndWrap((delegate, response) -> { + if (response.getResources().count() < 1) { + throw new IllegalArgumentException( + "Error parsing request config, model id does not match any models available on this platform. Was [" + + modelId + + "]. You may need to load it into the cluster using eland." + ); + } else { + var customElandInternalServiceSettings = (CustomElandInternalServiceSettings) CustomElandInternalServiceSettings.fromMap( + serviceSettingsMap + ).build(); + throwIfNotEmptyMap(serviceSettingsMap, name()); + delegate.onResponse(new CustomElandModel(inferenceEntityId, taskType, name(), customElandInternalServiceSettings)); + } + }); + + client.execute(GetTrainedModelsAction.INSTANCE, request, getModelsListener); + } + + private void e5Case( + String inferenceEntityId, + TaskType taskType, + Map config, + Set platformArchitectures, + Map serviceSettingsMap, + ActionListener modelListener + ) { + var e5ServiceSettings = MultilingualE5SmallInternalServiceSettings.fromMap(serviceSettingsMap); + + if (e5ServiceSettings.getModelId() == null) { + e5ServiceSettings.setModelId(selectDefaultModelVariantBasedOnClusterArchitecture(platformArchitectures)); + } + + if (modelVariantDoesNotMatchArchitecturesAndIsNotPlatformAgnostic(platformArchitectures, e5ServiceSettings)) { + throw new IllegalArgumentException( + "Error parsing request config, model id does not match any models available on this platform. 
Was ["
+                    + e5ServiceSettings.getModelId()
+                    + "]"
+            );
+        }
+
+        throwIfNotEmptyMap(config, name());
+        throwIfNotEmptyMap(serviceSettingsMap, name());
+
+        modelListener.onResponse(
+            new MultilingualE5SmallModel(
+                inferenceEntityId,
+                taskType,
+                NAME,
+                (MultilingualE5SmallInternalServiceSettings) e5ServiceSettings.build()
+            )
+        );
+    }
+
+    private static boolean modelVariantDoesNotMatchArchitecturesAndIsNotPlatformAgnostic(
+        Set<String> platformArchitectures,
+        InternalServiceSettings.Builder e5ServiceSettings
+    ) {
+        return e5ServiceSettings.getModelId().equals(selectDefaultModelVariantBasedOnClusterArchitecture(platformArchitectures)) == false
+            && e5ServiceSettings.getModelId().equals(MULTILINGUAL_E5_SMALL_MODEL_ID) == false;
+    }
+
+    @Override
+    public TextEmbeddingModel parsePersistedConfigWithSecrets(
+        String inferenceEntityId,
+        TaskType taskType,
+        Map<String, Object> config,
+        Map<String, Object> secrets
+    ) {
+        return parsePersistedConfig(inferenceEntityId, taskType, config);
+    }
+
+    @Override
+    public TextEmbeddingModel parsePersistedConfig(String inferenceEntityId, TaskType taskType, Map<String, Object> config) {
+        Map<String, Object> serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS);
+
+        String modelId = (String) serviceSettingsMap.get(MODEL_ID);
+        if (modelId == null) {
+            throw new IllegalArgumentException("Error parsing request config, model id is missing");
+        }
+
+        if (MULTILINGUAL_E5_SMALL_VALID_IDS.contains(modelId)) {
+            return new MultilingualE5SmallModel(
+                inferenceEntityId,
+                taskType,
+                NAME,
+                (MultilingualE5SmallInternalServiceSettings) MultilingualE5SmallInternalServiceSettings.fromMap(serviceSettingsMap).build()
+            );
+        } else {
+            return new CustomElandModel(
+                inferenceEntityId,
+                taskType,
+                name(),
+                (CustomElandInternalServiceSettings) CustomElandInternalServiceSettings.fromMap(serviceSettingsMap).build()
+            );
+        }
+
+    }
+
+    @Override
+    public void infer(
+        Model model,
+        List<String> input,
+        Map<String, Object> taskSettings,
+        InputType inputType,
+        ActionListener<InferenceServiceResults> listener
+    ) {
+        try {
+            checkCompatibleTaskType(model.getConfigurations().getTaskType());
+        } catch (Exception e) {
+            listener.onFailure(e);
+            return;
+        }
+
+        var request = InferTrainedModelDeploymentAction.Request.forTextInput(
+            model.getConfigurations().getInferenceEntityId(),
+            TextEmbeddingConfigUpdate.EMPTY_INSTANCE,
+            input,
+            TimeValue.timeValueSeconds(10) // TODO get timeout from request
+        );
+
+        client.execute(
+            InferTrainedModelDeploymentAction.INSTANCE,
+            request,
+            listener.delegateFailureAndWrap((l, inferenceResult) -> l.onResponse(TextEmbeddingResults.of(inferenceResult.getResults())))
+        );
+    }
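+
+    // chunkedInfer() below routes through the same deployment as infer(): explicit
+    // chunking options become a tokenization window update, otherwise an empty
+    // update leaves the model's default tokenization in place.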
+    @Override
+    public void chunkedInfer(
+        Model model,
+        List<String> input,
+        Map<String, Object> taskSettings,
+        InputType inputType,
+        ChunkingOptions chunkingOptions,
+        ActionListener<ChunkedInferenceServiceResults> listener
+    ) {
+        try {
+            checkCompatibleTaskType(model.getConfigurations().getTaskType());
+        } catch (Exception e) {
+            listener.onFailure(e);
+            return;
+        }
+
+        var configUpdate = chunkingOptions.settingsArePresent()
+            ? new TokenizationConfigUpdate(chunkingOptions.windowSize(), chunkingOptions.span())
+            : TextExpansionConfigUpdate.EMPTY_UPDATE;
+
+        var request = InferTrainedModelDeploymentAction.Request.forTextInput(
+            model.getConfigurations().getInferenceEntityId(),
+            configUpdate,
+            input,
+            TimeValue.timeValueSeconds(10) // TODO get timeout from request
+        );
+        request.setChunkResults(true);
+
+        client.execute(
+            InferTrainedModelDeploymentAction.INSTANCE,
+            request,
+            listener.delegateFailureAndWrap((l, inferenceResult) -> l.onResponse(translateChunkedResults(inferenceResult.getResults())))
+        );
+    }
+
+    @Override
+    public void start(Model model, ActionListener<Boolean> listener) {
+        if (model instanceof TextEmbeddingModel == false) {
+            listener.onFailure(notTextEmbeddingModelException(model));
+            return;
+        }
+
+        if (model.getConfigurations().getTaskType() != TaskType.TEXT_EMBEDDING) {
+            listener.onFailure(
+                new IllegalStateException(TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), NAME))
+            );
+            return;
+        }
+
+        var startRequest = ((TextEmbeddingModel) model).getStartTrainedModelDeploymentActionRequest();
+        var responseListener = ((TextEmbeddingModel) model).getCreateTrainedModelAssignmentActionListener(model, listener);
+
+        client.execute(StartTrainedModelDeploymentAction.INSTANCE, startRequest, responseListener);
+    }
+
+    @Override
+    public void stop(String inferenceEntityId, ActionListener<Boolean> listener) {
+        client.execute(
+            StopTrainedModelDeploymentAction.INSTANCE,
+            new StopTrainedModelDeploymentAction.Request(inferenceEntityId),
+            listener.delegateFailureAndWrap((delegatedResponseListener, response) -> delegatedResponseListener.onResponse(Boolean.TRUE))
+        );
+    }
+
+    @Override
+    public void putModel(Model model, ActionListener<Boolean> listener) {
+        if (model instanceof TextEmbeddingModel == false) {
+            listener.onFailure(notTextEmbeddingModelException(model));
+            return;
+        } else if (model instanceof MultilingualE5SmallModel e5Model) {
+            String modelId = e5Model.getServiceSettings().getModelId();
+            List<String> fieldNames = List.of();
+            var input = new TrainedModelInput(fieldNames);
+            var config = TrainedModelConfig.builder().setInput(input).setModelId(modelId).build();
+            PutTrainedModelAction.Request putRequest = new PutTrainedModelAction.Request(config, false, true);
+            executeAsyncWithOrigin(
+                client,
+                INFERENCE_ORIGIN,
+                PutTrainedModelAction.INSTANCE,
+                putRequest,
+                listener.delegateFailure((l, r) -> {
+                    l.onResponse(Boolean.TRUE);
+                })
+            );
+        } else if (model instanceof CustomElandModel elandModel) {
+            logger.info("Custom eland model detected, model must have already been loaded into the cluster with eland.");
+            listener.onResponse(Boolean.TRUE);
+        } else {
+            listener.onFailure(
+                new IllegalArgumentException(
+                    "Cannot download the model automatically for ["
+                        + model.getConfigurations().getInferenceEntityId()
+                        + "]; you may need to download it through the trained models API or with eland."
+ ) + ); + return; + } + } + + private static IllegalStateException notTextEmbeddingModelException(Model model) { + return new IllegalStateException( + "Error starting model, [" + model.getConfigurations().getInferenceEntityId() + "] is not a text embedding model" + ); + } + + private void checkCompatibleTaskType(TaskType taskType) { + if (TaskType.TEXT_EMBEDDING.isAnyOrSame(taskType) == false) { + throw new ElasticsearchStatusException(TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), RestStatus.BAD_REQUEST); + } + } + + private ChunkedTextEmbeddingResults translateChunkedResults(List inferenceResults) { + if (inferenceResults.size() != 1) { + throw new ElasticsearchStatusException("Expected exactly one chunked text embedding result", RestStatus.INTERNAL_SERVER_ERROR); + } + + if (inferenceResults.get( + 0 + ) instanceof org.elasticsearch.xpack.core.ml.inference.results.ChunkedTextEmbeddingResults mlChunkedResult) { + return ChunkedTextEmbeddingResults.ofMlResult(mlChunkedResult); + } else { + throw new ElasticsearchStatusException( + "Expected a chunked inference [{}] received [{}]", + RestStatus.INTERNAL_SERVER_ERROR, + ChunkedTextExpansionResults.NAME, + inferenceResults.get(0).getWriteableName() + ); + } + } + + @Override + public boolean isInClusterService() { + return true; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; + } + + @Override + public void close() throws IOException {} + + @Override + public String name() { + return NAME; + } + + private static String selectDefaultModelVariantBasedOnClusterArchitecture(Set modelArchitectures) { + // choose a default model version based on the cluster architecture + boolean homogenous = modelArchitectures.size() == 1; + if (homogenous && modelArchitectures.iterator().next().equals("linux-x86_64")) { + // Use the hardware optimized model + return MULTILINGUAL_E5_SMALL_MODEL_ID_LINUX_X86; + } else { + // default to the platform-agnostic model + return MULTILINGUAL_E5_SMALL_MODEL_ID; + } + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingInternalServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingInternalServiceSettings.java new file mode 100644 index 0000000000000..fcc96703e221f --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingInternalServiceSettings.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ * + * This file was contributed to by a generative AI + */ + +package org.elasticsearch.xpack.inference.services.textembedding; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; + +import java.io.IOException; + +import static org.elasticsearch.TransportVersions.ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; + +public class TextEmbeddingInternalServiceSettings extends InternalServiceSettings { + + public static final String NAME = "text_embedding_internal_service_settings"; + + public TextEmbeddingInternalServiceSettings(int numAllocations, int numThreads, String modelVariant) { + super(numAllocations, numThreads, modelVariant); + } + + public TextEmbeddingInternalServiceSettings(StreamInput in) throws IOException { + super(in.readVInt(), in.readVInt(), in.readString()); + } + + @Override + public String getWriteableName() { + return TextEmbeddingInternalServiceSettings.NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED; + } + +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingModel.java new file mode 100644 index 0000000000000..800e2928c7afa --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingModel.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.textembedding; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.xpack.core.ml.action.CreateTrainedModelAssignmentAction; +import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; + +public abstract class TextEmbeddingModel extends Model { + + public TextEmbeddingModel( + String inferenceEntityId, + TaskType taskType, + String service, + TextEmbeddingInternalServiceSettings serviceSettings + ) { + super(new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings)); + } + + @Override + public TextEmbeddingInternalServiceSettings getServiceSettings() { + return (TextEmbeddingInternalServiceSettings) super.getServiceSettings(); + } + + abstract StartTrainedModelDeploymentAction.Request getStartTrainedModelDeploymentActionRequest(); + + abstract ActionListener getCreateTrainedModelAssignmentActionListener( + Model model, + ActionListener listener + ); +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java index 16c3564f14328..d52595a5899a8 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/ModelConfigurationsTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.inference.TaskSettings; import org.elasticsearch.inference.TaskType; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeServiceSettingsTests; +import org.elasticsearch.xpack.inference.services.elser.ElserInternalServiceSettingsTests; import org.elasticsearch.xpack.inference.services.elser.ElserMlNodeTaskSettings; public class ModelConfigurationsTests extends AbstractWireSerializingTestCase { @@ -60,7 +60,7 @@ public static ModelConfigurations mutateTestInstance(ModelConfigurations instanc } private static ServiceSettings randomServiceSettings() { - return ElserMlNodeServiceSettingsTests.createRandom(); + return ElserInternalServiceSettingsTests.createRandom(); } private static TaskSettings randomTaskSettings(TaskType taskType) { diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java index 873582ee353c7..4d2d8ee5d7fcf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java @@ -132,13 +132,14 @@ public String name() { } @Override - public Model parseRequestConfig( + public void parseRequestConfig( String inferenceEntityId, TaskType taskType, Map config, - Set platfromArchitectures + Set platfromArchitectures, + ActionListener parsedModelListener ) { - return null; + parsedModelListener.onResponse(null); } @Override diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java 
index 32c4acd109685..f9b76dfcf2528 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -3,6 +3,8 @@ * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. + * + * this file was contributed to by a generative AI */ package org.elasticsearch.xpack.inference.services.cohere; @@ -11,6 +13,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; @@ -96,7 +99,22 @@ public void testParseRequestConfig_CreatesACohereEmbeddingsModel() throws IOExce new SetOnce<>(createWithEmptySettings(threadPool)) ) ) { - var model = service.parseRequestConfig( + + ActionListener modelListener = ActionListener.wrap(model -> { + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); + MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); + MatcherAssert.assertThat( + embeddingsModel.getTaskSettings(), + is(new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.START)) + ); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + }, e -> fail("Model parsing should have succeeded " + e.getMessage())); + + service.parseRequestConfig( "id", TaskType.TEXT_EMBEDDING, getRequestConfigMap( @@ -104,20 +122,10 @@ public void testParseRequestConfig_CreatesACohereEmbeddingsModel() throws IOExce getTaskSettingsMap(InputType.INGEST, CohereTruncation.START), getSecretSettingsMap("secret") ), - Set.of() + Set.of(), + modelListener ); - MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); - - var embeddingsModel = (CohereEmbeddingsModel) model; - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getUri().toString(), is("url")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getCommonSettings().getModelId(), is("model")); - MatcherAssert.assertThat(embeddingsModel.getServiceSettings().getEmbeddingType(), is(CohereEmbeddingType.FLOAT)); - MatcherAssert.assertThat( - embeddingsModel.getTaskSettings(), - is(new CohereEmbeddingsTaskSettings(InputType.INGEST, CohereTruncation.START)) - ); - MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } } @@ -128,27 +136,33 @@ public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOExcepti new SetOnce<>(createWithEmptySettings(threadPool)) ) ) { - var thrownException = expectThrows( + + var failureListener = getModelListenerForException( ElasticsearchStatusException.class, - () -> service.parseRequestConfig( - "id", - TaskType.SPARSE_EMBEDDING, - getRequestConfigMap( - CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null), - 
getTaskSettingsMapEmpty(), - getSecretSettingsMap("secret") - ), - Set.of() - ) + "The [cohere] service does not support task type [sparse_embedding]" ); - MatcherAssert.assertThat( - thrownException.getMessage(), - is("The [cohere] service does not support task type [sparse_embedding]") + service.parseRequestConfig( + "id", + TaskType.SPARSE_EMBEDDING, + getRequestConfigMap( + CohereEmbeddingsServiceSettingsTests.getServiceSettingsMap("url", null, null), + getTaskSettingsMapEmpty(), + getSecretSettingsMap("secret") + ), + Set.of(), + failureListener ); } } + private static ActionListener getModelListenerForException(Class exceptionClass, String expectedMessage) { + return ActionListener.wrap((model) -> fail("Model parsing should have failed"), e -> { + MatcherAssert.assertThat(e, instanceOf(exceptionClass)); + MatcherAssert.assertThat(e.getMessage(), is(expectedMessage)); + }); + } + public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws IOException { try ( var service = new CohereService( @@ -163,15 +177,11 @@ public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws I ); config.put("extra_key", "value"); - var thrownException = expectThrows( + var failureListener = getModelListenerForException( ElasticsearchStatusException.class, - () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) - ); - - MatcherAssert.assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [cohere] service") + "Model configuration contains settings [{extra_key=value}] unknown to the [cohere] service" ); + service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of(), failureListener); } } @@ -187,15 +197,11 @@ public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInServiceSettingsMa var config = getRequestConfigMap(serviceSettings, getTaskSettingsMap(null, null), getSecretSettingsMap("secret")); - var thrownException = expectThrows( + var failureListener = getModelListenerForException( ElasticsearchStatusException.class, - () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) - ); - - MatcherAssert.assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [cohere] service") + "Model configuration contains settings [{extra_key=value}] unknown to the [cohere] service" ); + service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of(), failureListener); } } @@ -215,15 +221,12 @@ public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInTaskSettingsMap() getSecretSettingsMap("secret") ); - var thrownException = expectThrows( + var failureListener = getModelListenerForException( ElasticsearchStatusException.class, - () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + "Model configuration contains settings [{extra_key=value}] unknown to the [cohere] service" ); + service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of(), failureListener); - MatcherAssert.assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [cohere] service") - ); } } @@ -243,15 +246,11 @@ public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInSecretSettingsMap secretSettingsMap ); - var thrownException = expectThrows( + var failureListener = getModelListenerForException( ElasticsearchStatusException.class, - () -> service.parseRequestConfig("id", 
TaskType.TEXT_EMBEDDING, config, Set.of()) - ); - - MatcherAssert.assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [cohere] service") + "Model configuration contains settings [{extra_key=value}] unknown to the [cohere] service" ); + service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of(), failureListener); } } @@ -262,7 +261,16 @@ public void testParseRequestConfig_CreatesACohereEmbeddingsModelWithoutUrl() thr new SetOnce<>(createWithEmptySettings(threadPool)) ) ) { - var model = service.parseRequestConfig( + var modelListener = ActionListener.wrap((model) -> { + MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); + + var embeddingsModel = (CohereEmbeddingsModel) model; + assertNull(embeddingsModel.getServiceSettings().getCommonSettings().getUri()); + MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); + MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + }, (e) -> fail("Model parsing should have succeeded " + e.getMessage())); + + service.parseRequestConfig( "id", TaskType.TEXT_EMBEDDING, getRequestConfigMap( @@ -270,15 +278,10 @@ public void testParseRequestConfig_CreatesACohereEmbeddingsModelWithoutUrl() thr getTaskSettingsMapEmpty(), getSecretSettingsMap("secret") ), - Set.of() + Set.of(), + modelListener ); - MatcherAssert.assertThat(model, instanceOf(CohereEmbeddingsModel.class)); - - var embeddingsModel = (CohereEmbeddingsModel) model; - assertNull(embeddingsModel.getServiceSettings().getCommonSettings().getUri()); - MatcherAssert.assertThat(embeddingsModel.getTaskSettings(), is(CohereEmbeddingsTaskSettings.EMPTY_SETTINGS)); - MatcherAssert.assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceSettingsTests.java similarity index 51% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettingsTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceSettingsTests.java index c6d8d852a47c2..3adb6481d945b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceSettingsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceSettingsTests.java @@ -19,69 +19,69 @@ import static org.hamcrest.Matchers.containsString; -public class ElserMlNodeServiceSettingsTests extends AbstractWireSerializingTestCase { +public class ElserInternalServiceSettingsTests extends AbstractWireSerializingTestCase { - public static ElserMlNodeServiceSettings createRandom() { - return new ElserMlNodeServiceSettings( + public static ElserInternalServiceSettings createRandom() { + return new ElserInternalServiceSettings( randomIntBetween(1, 4), randomIntBetween(1, 2), - randomFrom(ElserMlNodeService.VALID_ELSER_MODELS) + randomFrom(ElserInternalService.VALID_ELSER_MODEL_IDS) ); } public void testFromMap_DefaultModelVersion() { - var serviceSettingsBuilder = ElserMlNodeServiceSettings.fromMap( - new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, 
ElserMlNodeServiceSettings.NUM_THREADS, 4)) + var serviceSettingsBuilder = ElserInternalServiceSettings.fromMap( + new HashMap<>(Map.of(ElserInternalServiceSettings.NUM_ALLOCATIONS, 1, ElserInternalServiceSettings.NUM_THREADS, 4)) ); - assertNull(serviceSettingsBuilder.getModelVariant()); + assertNull(serviceSettingsBuilder.getModelId()); } public void testFromMap() { - var serviceSettings = ElserMlNodeServiceSettings.fromMap( + var serviceSettings = ElserInternalServiceSettings.fromMap( new HashMap<>( Map.of( - ElserMlNodeServiceSettings.NUM_ALLOCATIONS, + ElserInternalServiceSettings.NUM_ALLOCATIONS, 1, - ElserMlNodeServiceSettings.NUM_THREADS, + ElserInternalServiceSettings.NUM_THREADS, 4, - ElserMlNodeServiceSettings.MODEL_VERSION, + ElserInternalServiceSettings.MODEL_ID, ".elser_model_1" ) ) ).build(); - assertEquals(new ElserMlNodeServiceSettings(1, 4, ".elser_model_1"), serviceSettings); + assertEquals(new ElserInternalServiceSettings(1, 4, ".elser_model_1"), serviceSettings); } public void testFromMapInvalidVersion() { var e = expectThrows( ValidationException.class, - () -> ElserMlNodeServiceSettings.fromMap( + () -> ElserInternalServiceSettings.fromMap( new HashMap<>( Map.of( - ElserMlNodeServiceSettings.NUM_ALLOCATIONS, + ElserInternalServiceSettings.NUM_ALLOCATIONS, 1, - ElserMlNodeServiceSettings.NUM_THREADS, + ElserInternalServiceSettings.NUM_THREADS, 4, - "model_version", + "model_id", ".elser_model_27" ) ) ) ); - assertThat(e.getMessage(), containsString("unknown ELSER model version [.elser_model_27]")); + assertThat(e.getMessage(), containsString("unknown ELSER model id [.elser_model_27]")); } public void testFromMapMissingOptions() { var e = expectThrows( ValidationException.class, - () -> ElserMlNodeServiceSettings.fromMap(new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1))) + () -> ElserInternalServiceSettings.fromMap(new HashMap<>(Map.of(ElserInternalServiceSettings.NUM_ALLOCATIONS, 1))) ); assertThat(e.getMessage(), containsString("[service_settings] does not contain the required setting [num_threads]")); e = expectThrows( ValidationException.class, - () -> ElserMlNodeServiceSettings.fromMap(new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_THREADS, 1))) + () -> ElserInternalServiceSettings.fromMap(new HashMap<>(Map.of(ElserInternalServiceSettings.NUM_THREADS, 1))) ); assertThat(e.getMessage(), containsString("[service_settings] does not contain the required setting [num_allocations]")); @@ -89,20 +89,20 @@ public void testFromMapMissingOptions() { public void testTransportVersionIsCompatibleWithElserModelVersion() { assertTrue( - ElserMlNodeServiceSettings.transportVersionIsCompatibleWithElserModelVersion( + ElserInternalServiceSettings.transportVersionIsCompatibleWithElserModelVersion( TransportVersions.ELSER_SERVICE_MODEL_VERSION_ADDED ) ); - assertTrue(ElserMlNodeServiceSettings.transportVersionIsCompatibleWithElserModelVersion(TransportVersions.V_8_11_X)); + assertTrue(ElserInternalServiceSettings.transportVersionIsCompatibleWithElserModelVersion(TransportVersions.V_8_11_X)); - assertFalse(ElserMlNodeServiceSettings.transportVersionIsCompatibleWithElserModelVersion(TransportVersions.V_8_10_X)); + assertFalse(ElserInternalServiceSettings.transportVersionIsCompatibleWithElserModelVersion(TransportVersions.V_8_10_X)); assertFalse( - ElserMlNodeServiceSettings.transportVersionIsCompatibleWithElserModelVersion( + ElserInternalServiceSettings.transportVersionIsCompatibleWithElserModelVersion( TransportVersions.PLUGIN_DESCRIPTOR_OPTIONAL_CLASSNAME ) 
); assertFalse( - ElserMlNodeServiceSettings.transportVersionIsCompatibleWithElserModelVersion( + ElserInternalServiceSettings.transportVersionIsCompatibleWithElserModelVersion( TransportVersions.UNIVERSAL_PROFILING_LICENSE_ADDED ) ); @@ -110,18 +110,18 @@ public void testTransportVersionIsCompatibleWithElserModelVersion() { public void testBwcWrite() throws IOException { { - var settings = new ElserMlNodeServiceSettings(1, 1, ".elser_model_1"); + var settings = new ElserInternalServiceSettings(1, 1, ".elser_model_1"); var copy = copyInstance(settings, TransportVersions.ELSER_SERVICE_MODEL_VERSION_ADDED); assertEquals(settings, copy); } { - var settings = new ElserMlNodeServiceSettings(1, 1, ".elser_model_1"); + var settings = new ElserInternalServiceSettings(1, 1, ".elser_model_1"); var copy = copyInstance(settings, TransportVersions.PLUGIN_DESCRIPTOR_OPTIONAL_CLASSNAME); assertNotEquals(settings, copy); - assertEquals(".elser_model_2", copy.getModelVariant()); + assertEquals(".elser_model_2", copy.getModelId()); } { - var settings = new ElserMlNodeServiceSettings(1, 1, ".elser_model_1"); + var settings = new ElserInternalServiceSettings(1, 1, ".elser_model_1"); var copy = copyInstance(settings, TransportVersions.V_8_11_X); assertEquals(settings, copy); } @@ -129,41 +129,33 @@ public void testBwcWrite() throws IOException { public void testFromMapInvalidSettings() { var settingsMap = new HashMap( - Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 0, ElserMlNodeServiceSettings.NUM_THREADS, -1) + Map.of(ElserInternalServiceSettings.NUM_ALLOCATIONS, 0, ElserInternalServiceSettings.NUM_THREADS, -1) ); - var e = expectThrows(ValidationException.class, () -> ElserMlNodeServiceSettings.fromMap(settingsMap)); + var e = expectThrows(ValidationException.class, () -> ElserInternalServiceSettings.fromMap(settingsMap)); assertThat(e.getMessage(), containsString("Invalid value [0]. [num_allocations] must be a positive integer")); assertThat(e.getMessage(), containsString("Invalid value [-1]. 
[num_threads] must be a positive integer")); } @Override - protected Writeable.Reader instanceReader() { - return ElserMlNodeServiceSettings::new; + protected Writeable.Reader instanceReader() { + return ElserInternalServiceSettings::new; } @Override - protected ElserMlNodeServiceSettings createTestInstance() { + protected ElserInternalServiceSettings createTestInstance() { return createRandom(); } @Override - protected ElserMlNodeServiceSettings mutateInstance(ElserMlNodeServiceSettings instance) { + protected ElserInternalServiceSettings mutateInstance(ElserInternalServiceSettings instance) { return switch (randomIntBetween(0, 2)) { - case 0 -> new ElserMlNodeServiceSettings( - instance.getNumAllocations() + 1, - instance.getNumThreads(), - instance.getModelVariant() - ); - case 1 -> new ElserMlNodeServiceSettings( - instance.getNumAllocations(), - instance.getNumThreads() + 1, - instance.getModelVariant() - ); + case 0 -> new ElserInternalServiceSettings(instance.getNumAllocations() + 1, instance.getNumThreads(), instance.getModelId()); + case 1 -> new ElserInternalServiceSettings(instance.getNumAllocations(), instance.getNumThreads() + 1, instance.getModelId()); case 2 -> { - var versions = new HashSet<>(ElserMlNodeService.VALID_ELSER_MODELS); - versions.remove(instance.getModelVariant()); - yield new ElserMlNodeServiceSettings(instance.getNumAllocations(), instance.getNumThreads(), versions.iterator().next()); + var versions = new HashSet<>(ElserInternalService.VALID_ELSER_MODEL_IDS); + versions.remove(instance.getModelId()); + yield new ElserInternalServiceSettings(instance.getNumAllocations(), instance.getNumThreads(), versions.iterator().next()); } default -> throw new IllegalStateException(); }; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java new file mode 100644 index 0000000000000..b098edd5e37bf --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserInternalServiceTests.java @@ -0,0 +1,314 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ * + * this file was contributed to by a generative AI + */ + +package org.elasticsearch.xpack.inference.services.elser; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.inference.InferenceServiceExtension; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.containsString; +import static org.mockito.Mockito.mock; + +public class ElserInternalServiceTests extends ESTestCase { + + public static Model randomModelConfig(String inferenceEntityId, TaskType taskType) { + return switch (taskType) { + case SPARSE_EMBEDDING -> new ElserInternalModel( + inferenceEntityId, + taskType, + ElserInternalService.NAME, + ElserInternalServiceSettingsTests.createRandom(), + ElserMlNodeTaskSettingsTests.createRandom() + ); + default -> throw new IllegalArgumentException("task type " + taskType + " is not supported"); + }; + } + + public void testParseConfigStrict() { + var service = createService(mock(Client.class)); + + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + ElserInternalServiceSettings.NUM_ALLOCATIONS, + 1, + ElserInternalServiceSettings.NUM_THREADS, + 4, + "model_id", + ".elser_model_1" + ) + ) + ); + settings.put(ModelConfigurations.TASK_SETTINGS, Map.of()); + + var expectedModel = new ElserInternalModel( + "foo", + TaskType.SPARSE_EMBEDDING, + ElserInternalService.NAME, + new ElserInternalServiceSettings(1, 4, ".elser_model_1"), + ElserMlNodeTaskSettings.DEFAULT + ); + + var modelVerificationListener = getModelVerificationListener(expectedModel); + + service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Set.of(), modelVerificationListener); + + } + + public void testParseConfigLooseWithOldModelId() { + var service = createService(mock(Client.class)); + + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + ElserInternalServiceSettings.NUM_ALLOCATIONS, + 1, + ElserInternalServiceSettings.NUM_THREADS, + 4, + "model_version", + ".elser_model_1" + ) + ) + ); + settings.put(ModelConfigurations.TASK_SETTINGS, Map.of()); + + var expectedModel = new ElserInternalModel( + "foo", + TaskType.SPARSE_EMBEDDING, + ElserInternalService.NAME, + new ElserInternalServiceSettings(1, 4, ".elser_model_1"), + ElserMlNodeTaskSettings.DEFAULT + ); + + var realModel = service.parsePersistedConfig("foo", TaskType.SPARSE_EMBEDDING, settings); + + assertEquals(expectedModel, realModel); + + } + + private static ActionListener getModelVerificationListener(ElserInternalModel expectedModel) { + return ActionListener.wrap( + (model) -> { assertEquals(expectedModel, model); }, + (e) -> fail("Model verification should not fail " + e.getMessage()) + ); + } + + public void testParseConfigStrictWithNoTaskSettings() { + var service = createService(mock(Client.class)); + + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>(Map.of(ElserInternalServiceSettings.NUM_ALLOCATIONS, 1, ElserInternalServiceSettings.NUM_THREADS, 4)) + ); + + var expectedModel = new ElserInternalModel( + "foo", + TaskType.SPARSE_EMBEDDING, + ElserInternalService.NAME, + new ElserInternalServiceSettings(1, 4, 
ElserInternalService.ELSER_V2_MODEL), + ElserMlNodeTaskSettings.DEFAULT + ); + + var modelVerificationListener = getModelVerificationListener(expectedModel); + + service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Set.of(), modelVerificationListener); + + } + + public void testParseConfigStrictWithUnknownSettings() { + + var service = createService(mock(Client.class)); + + for (boolean throwOnUnknown : new boolean[] { true, false }) { + { + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + ElserInternalServiceSettings.NUM_ALLOCATIONS, + 1, + ElserInternalServiceSettings.NUM_THREADS, + 4, + ElserInternalServiceSettings.MODEL_ID, + ".elser_model_2" + ) + ) + ); + settings.put(ModelConfigurations.TASK_SETTINGS, Map.of()); + settings.put("foo", "bar"); + + ActionListener errorVerificationListener = ActionListener.wrap((model) -> { + if (throwOnUnknown) { + fail("Model verification should fail when throwOnUnknown is true"); + } + }, (e) -> { + if (throwOnUnknown) { + assertThat( + e.getMessage(), + containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser] service") + ); + } else { + fail("Model verification should not fail when throwOnUnknown is false"); + } + }); + + if (throwOnUnknown == false) { + var parsed = service.parsePersistedConfigWithSecrets( + "foo", + TaskType.SPARSE_EMBEDDING, + settings, + Collections.emptyMap() + ); + } else { + + service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Set.of(), errorVerificationListener); + } + } + + { + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + ElserInternalServiceSettings.NUM_ALLOCATIONS, + 1, + ElserInternalServiceSettings.NUM_THREADS, + 4, + ElserInternalServiceSettings.MODEL_ID, + ".elser_model_2" + ) + ) + ); + settings.put(ModelConfigurations.TASK_SETTINGS, Map.of("foo", "bar")); + + ActionListener errorVerificationListener = ActionListener.wrap((model) -> { + if (throwOnUnknown) { + fail("Model verification should fail when throwOnUnknown is true"); + } + }, (e) -> { + if (throwOnUnknown) { + assertThat( + e.getMessage(), + containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser] service") + ); + } else { + fail("Model verification should not fail when throwOnUnknown is false"); + } + }); + if (throwOnUnknown == false) { + var parsed = service.parsePersistedConfigWithSecrets( + "foo", + TaskType.SPARSE_EMBEDDING, + settings, + Collections.emptyMap() + ); + } else { + service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Set.of(), errorVerificationListener); + } + } + + { + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + ElserInternalServiceSettings.NUM_ALLOCATIONS, + 1, + ElserInternalServiceSettings.NUM_THREADS, + 4, + ElserInternalServiceSettings.MODEL_ID, + ".elser_model_2", + "foo", + "bar" + ) + ) + ); + settings.put(ModelConfigurations.TASK_SETTINGS, Map.of("foo", "bar")); + + ActionListener errorVerificationListener = ActionListener.wrap((model) -> { + if (throwOnUnknown) { + fail("Model verification should fail when throwOnUnknown is true"); + } + }, (e) -> { + if (throwOnUnknown) { + assertThat( + e.getMessage(), + containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser] service") + ); + } else { + fail("Model verification should not fail when throwOnUnknown is false"); + } + }); + 
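+ // Depending on throwOnUnknown, the unknown {foo=bar} entry is fed either to
+ // the strict request path, which must report it as a failure on the listener,
+ // or to the lenient persisted-config path, which ignores it (presumably so
+ // configurations written by newer nodes remain readable).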
if (throwOnUnknown == false) { + var parsed = service.parsePersistedConfigWithSecrets( + "foo", + TaskType.SPARSE_EMBEDDING, + settings, + Collections.emptyMap() + ); + } else { + service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Set.of(), errorVerificationListener); + } + } + } + } + + public void testParseRequestConfig_DefaultModel() { + var service = createService(mock(Client.class)); + { + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>(Map.of(ElserInternalServiceSettings.NUM_ALLOCATIONS, 1, ElserInternalServiceSettings.NUM_THREADS, 4)) + ); + + ActionListener modelActionListener = ActionListener.wrap((model) -> { + assertEquals(".elser_model_2", ((ElserInternalModel) model).getServiceSettings().getModelId()); + }, (e) -> { fail("Model verification should not fail"); }); + + service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Set.of(), modelActionListener); + } + { + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>(Map.of(ElserInternalServiceSettings.NUM_ALLOCATIONS, 1, ElserInternalServiceSettings.NUM_THREADS, 4)) + ); + + ActionListener modelActionListener = ActionListener.wrap((model) -> { + assertEquals(".elser_model_2_linux-x86_64", ((ElserInternalModel) model).getServiceSettings().getModelId()); + }, (e) -> { fail("Model verification should not fail"); }); + + service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Set.of("linux-x86_64"), modelActionListener); + } + } + + private ElserInternalService createService(Client client) { + var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client); + return new ElserInternalService(context); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java deleted file mode 100644 index 6e74241cc754c..0000000000000 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elser/ElserMlNodeServiceTests.java +++ /dev/null @@ -1,244 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.inference.services.elser; - -import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.inference.InferenceServiceExtension; -import org.elasticsearch.inference.Model; -import org.elasticsearch.inference.ModelConfigurations; -import org.elasticsearch.inference.TaskType; -import org.elasticsearch.test.ESTestCase; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - -import static org.hamcrest.Matchers.containsString; -import static org.mockito.Mockito.mock; - -public class ElserMlNodeServiceTests extends ESTestCase { - - public static Model randomModelConfig(String inferenceEntityId, TaskType taskType) { - return switch (taskType) { - case SPARSE_EMBEDDING -> new ElserMlNodeModel( - inferenceEntityId, - taskType, - ElserMlNodeService.NAME, - ElserMlNodeServiceSettingsTests.createRandom(), - ElserMlNodeTaskSettingsTests.createRandom() - ); - default -> throw new IllegalArgumentException("task type " + taskType + " is not supported"); - }; - } - - public void testParseConfigStrict() { - var service = createService(mock(Client.class)); - - var settings = new HashMap(); - settings.put( - ModelConfigurations.SERVICE_SETTINGS, - new HashMap<>( - Map.of( - ElserMlNodeServiceSettings.NUM_ALLOCATIONS, - 1, - ElserMlNodeServiceSettings.NUM_THREADS, - 4, - "model_version", - ".elser_model_1" - ) - ) - ); - settings.put(ModelConfigurations.TASK_SETTINGS, Map.of()); - - ElserMlNodeModel parsedModel = service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Set.of()); - - assertEquals( - new ElserMlNodeModel( - "foo", - TaskType.SPARSE_EMBEDDING, - ElserMlNodeService.NAME, - new ElserMlNodeServiceSettings(1, 4, ".elser_model_1"), - ElserMlNodeTaskSettings.DEFAULT - ), - parsedModel - ); - } - - public void testParseConfigStrictWithNoTaskSettings() { - var service = createService(mock(Client.class)); - - var settings = new HashMap(); - settings.put( - ModelConfigurations.SERVICE_SETTINGS, - new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, ElserMlNodeServiceSettings.NUM_THREADS, 4)) - ); - - ElserMlNodeModel parsedModel = service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Set.of()); - - assertEquals( - new ElserMlNodeModel( - "foo", - TaskType.SPARSE_EMBEDDING, - ElserMlNodeService.NAME, - new ElserMlNodeServiceSettings(1, 4, ElserMlNodeService.ELSER_V2_MODEL), - ElserMlNodeTaskSettings.DEFAULT - ), - parsedModel - ); - } - - public void testParseConfigStrictWithUnknownSettings() { - - var service = createService(mock(Client.class)); - - for (boolean throwOnUnknown : new boolean[] { true, false }) { - { - var settings = new HashMap(); - settings.put( - ModelConfigurations.SERVICE_SETTINGS, - new HashMap<>( - Map.of( - ElserMlNodeServiceSettings.NUM_ALLOCATIONS, - 1, - ElserMlNodeServiceSettings.NUM_THREADS, - 4, - ElserMlNodeServiceSettings.MODEL_VERSION, - ".elser_model_2" - ) - ) - ); - settings.put(ModelConfigurations.TASK_SETTINGS, Map.of()); - settings.put("foo", "bar"); - - if (throwOnUnknown) { - var e = expectThrows( - ElasticsearchStatusException.class, - () -> service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Set.of()) - ); - assertThat( - e.getMessage(), - containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser] service") - ); - } else { - var parsed = service.parsePersistedConfigWithSecrets( - "foo", - 
TaskType.SPARSE_EMBEDDING, - settings, - Collections.emptyMap() - ); - } - } - - { - var settings = new HashMap(); - settings.put( - ModelConfigurations.SERVICE_SETTINGS, - new HashMap<>( - Map.of( - ElserMlNodeServiceSettings.NUM_ALLOCATIONS, - 1, - ElserMlNodeServiceSettings.NUM_THREADS, - 4, - ElserMlNodeServiceSettings.MODEL_VERSION, - ".elser_model_2" - ) - ) - ); - settings.put(ModelConfigurations.TASK_SETTINGS, Map.of("foo", "bar")); - - if (throwOnUnknown) { - var e = expectThrows( - ElasticsearchStatusException.class, - () -> service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Set.of()) - ); - assertThat( - e.getMessage(), - containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser] service") - ); - } else { - var parsed = service.parsePersistedConfigWithSecrets( - "foo", - TaskType.SPARSE_EMBEDDING, - settings, - Collections.emptyMap() - ); - } - } - - { - var settings = new HashMap(); - settings.put( - ModelConfigurations.SERVICE_SETTINGS, - new HashMap<>( - Map.of( - ElserMlNodeServiceSettings.NUM_ALLOCATIONS, - 1, - ElserMlNodeServiceSettings.NUM_THREADS, - 4, - ElserMlNodeServiceSettings.MODEL_VERSION, - ".elser_model_2", - "foo", - "bar" - ) - ) - ); - - if (throwOnUnknown) { - var e = expectThrows( - ElasticsearchStatusException.class, - () -> service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Set.of()) - ); - assertThat( - e.getMessage(), - containsString("Model configuration contains settings [{foo=bar}] unknown to the [elser] service") - ); - } else { - var parsed = service.parsePersistedConfigWithSecrets( - "foo", - TaskType.SPARSE_EMBEDDING, - settings, - Collections.emptyMap() - ); - } - } - } - } - - public void testParseRequestConfig_DefaultModel() { - var service = createService(mock(Client.class)); - { - var settings = new HashMap(); - settings.put( - ModelConfigurations.SERVICE_SETTINGS, - new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, ElserMlNodeServiceSettings.NUM_THREADS, 4)) - ); - - ElserMlNodeModel parsedModel = service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Set.of()); - - assertEquals(".elser_model_2", parsedModel.getServiceSettings().getModelVariant()); - } - { - var settings = new HashMap(); - settings.put( - ModelConfigurations.SERVICE_SETTINGS, - new HashMap<>(Map.of(ElserMlNodeServiceSettings.NUM_ALLOCATIONS, 1, ElserMlNodeServiceSettings.NUM_THREADS, 4)) - ); - - ElserMlNodeModel parsedModel = service.parseRequestConfig("foo", TaskType.SPARSE_EMBEDDING, settings, Set.of("linux-x86_64")); - - assertEquals(".elser_model_2_linux-x86_64", parsedModel.getServiceSettings().getModelVariant()); - } - } - - private ElserMlNodeService createService(Client client) { - var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client); - return new ElserMlNodeService(context); - } -} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java index 36a4d144d8c5c..b34a8ad8a3d65 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -3,6 +3,8 @@ * or more contributor license agreements. 
Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. + * + * this file was contributed to by a generative AI */ package org.elasticsearch.xpack.inference.services.huggingface; @@ -10,6 +12,7 @@ import org.apache.http.HttpHeaders; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; @@ -86,18 +89,22 @@ public void testParseRequestConfig_CreatesAnEmbeddingsModel() throws IOException new SetOnce<>(createWithEmptySettings(threadPool)) ) ) { - var model = service.parseRequestConfig( + + ActionListener modelVerificationActionListener = ActionListener.wrap((model) -> { + assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); + + var embeddingsModel = (HuggingFaceEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + }, (e) -> fail("parse request should not fail " + e.getMessage())); + + service.parseRequestConfig( "id", TaskType.TEXT_EMBEDDING, getRequestConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")), - Set.of() + Set.of(), + modelVerificationActionListener ); - - assertThat(model, instanceOf(HuggingFaceEmbeddingsModel.class)); - - var embeddingsModel = (HuggingFaceEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); - assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } } @@ -108,18 +115,21 @@ public void testParseRequestConfig_CreatesAnElserModel() throws IOException { new SetOnce<>(createWithEmptySettings(threadPool)) ) ) { - var model = service.parseRequestConfig( + ActionListener modelVerificationActionListener = ActionListener.wrap((model) -> { + assertThat(model, instanceOf(HuggingFaceElserModel.class)); + + var embeddingsModel = (HuggingFaceElserModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + }, (e) -> fail("parse request should not fail " + e.getMessage())); + + service.parseRequestConfig( "id", TaskType.SPARSE_EMBEDDING, getRequestConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")), - Set.of() + Set.of(), + modelVerificationActionListener ); - - assertThat(model, instanceOf(HuggingFaceElserModel.class)); - - var embeddingsModel = (HuggingFaceElserModel) model; - assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); - assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } } @@ -133,15 +143,18 @@ public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws I var config = getRequestConfigMap(getServiceSettingsMap("url"), getSecretSettingsMap("secret")); config.put("extra_key", "value"); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + ActionListener modelVerificationActionListener = ActionListener.wrap( + (model) -> { fail("parse request should fail"); }, + (e) -> { + assertThat(e, instanceOf(ElasticsearchStatusException.class)); + assertThat( + e.getMessage(), + is("Model configuration contains settings 
[{extra_key=value}] unknown to the [hugging_face] service") + ); + } ); - assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [hugging_face] service") - ); + service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of(), modelVerificationActionListener); } } @@ -157,15 +170,18 @@ public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInServiceSettingsMa var config = getRequestConfigMap(serviceSettings, getSecretSettingsMap("secret")); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + ActionListener modelVerificationActionListener = ActionListener.wrap( + (model) -> { fail("parse request should fail"); }, + (e) -> { + assertThat(e, instanceOf(ElasticsearchStatusException.class)); + assertThat( + e.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [hugging_face] service") + ); + } ); - assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [hugging_face] service") - ); + service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of(), modelVerificationActionListener); } } @@ -181,15 +197,18 @@ public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInSecretSettingsMap var config = getRequestConfigMap(getServiceSettingsMap("url"), secretSettingsMap); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + ActionListener modelVerificationActionListener = ActionListener.wrap( + (model) -> { fail("parse request should fail"); }, + (e) -> { + assertThat(e, instanceOf(ElasticsearchStatusException.class)); + assertThat( + e.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [hugging_face] service") + ); + } ); - assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [hugging_face] service") - ); + service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of(), modelVerificationActionListener); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index 1e40b86bb7597..b7d86620d9001 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -3,6 +3,8 @@ * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
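+ *
+ * Failure cases in this file are asserted through the listener's onFailure arm
+ * rather than with expectThrows. A minimal sketch of the pattern, mirroring
+ * the tests below:
+ *
+ *   service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of(),
+ *       ActionListener.wrap(
+ *           model -> fail("Expected exception, but got model: " + model),
+ *           e -> assertThat(e, instanceOf(ElasticsearchStatusException.class))));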
+ * + * this file was contributed to by a generative AI */ package org.elasticsearch.xpack.inference.services.openai; @@ -11,6 +13,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; @@ -92,7 +95,18 @@ public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModel() throws IOExc new SetOnce<>(createWithEmptySettings(threadPool)) ) ) { - var model = service.parseRequestConfig( + ActionListener modelVerificationListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); + assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); + assertThat(embeddingsModel.getTaskSettings().user(), is("user")); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + }, exception -> fail("Unexpected exception: " + exception)); + + service.parseRequestConfig( "id", TaskType.TEXT_EMBEDDING, getRequestConfigMap( @@ -100,17 +114,9 @@ public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModel() throws IOExc getTaskSettingsMap("model", "user"), getSecretSettingsMap("secret") ), - Set.of() + Set.of(), + modelVerificationListener ); - - assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); - - var embeddingsModel = (OpenAiEmbeddingsModel) model; - assertThat(embeddingsModel.getServiceSettings().uri().toString(), is("url")); - assertThat(embeddingsModel.getServiceSettings().organizationId(), is("org")); - assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); - assertThat(embeddingsModel.getTaskSettings().user(), is("user")); - assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } } @@ -121,21 +127,25 @@ public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOExcepti new SetOnce<>(createWithEmptySettings(threadPool)) ) ) { - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parseRequestConfig( - "id", - TaskType.SPARSE_EMBEDDING, - getRequestConfigMap( - getServiceSettingsMap("url", "org"), - getTaskSettingsMap("model", "user"), - getSecretSettingsMap("secret") - ), - Set.of() - ) + ActionListener modelVerificationListener = ActionListener.wrap( + model -> fail("Expected exception, but got model: " + model), + exception -> { + assertThat(exception, instanceOf(ElasticsearchStatusException.class)); + assertThat(exception.getMessage(), is("The [openai] service does not support task type [sparse_embedding]")); + } ); - assertThat(thrownException.getMessage(), is("The [openai] service does not support task type [sparse_embedding]")); + service.parseRequestConfig( + "id", + TaskType.SPARSE_EMBEDDING, + getRequestConfigMap( + getServiceSettingsMap("url", "org"), + getTaskSettingsMap("model", "user"), + getSecretSettingsMap("secret") + ), + Set.of(), + modelVerificationListener + ); } } @@ -153,15 +163,18 @@ public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInConfig() throws I ); config.put("extra_key", "value"); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> 
service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) + ActionListener modelVerificationListener = ActionListener.wrap( + model -> fail("Expected exception, but got model: " + model), + exception -> { + assertThat(exception, instanceOf(ElasticsearchStatusException.class)); + assertThat( + exception.getMessage(), + is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service") + ); + } ); - assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service") - ); + service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of(), modelVerificationListener); } } @@ -177,15 +190,14 @@ public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInServiceSettingsMa var config = getRequestConfigMap(serviceSettings, getTaskSettingsMap("model", "user"), getSecretSettingsMap("secret")); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) - ); + ActionListener modelVerificationListener = ActionListener.wrap((model) -> { + fail("Expected exception, but got model: " + model); + }, e -> { + assertThat(e, instanceOf(ElasticsearchStatusException.class)); + assertThat(e.getMessage(), is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service")); + }); - assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service") - ); + service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of(), modelVerificationListener); } } @@ -201,15 +213,14 @@ public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInTaskSettingsMap() var config = getRequestConfigMap(getServiceSettingsMap("url", "org"), taskSettingsMap, getSecretSettingsMap("secret")); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) - ); + ActionListener modelVerificationListener = ActionListener.wrap((model) -> { + fail("Expected exception, but got model: " + model); + }, e -> { + assertThat(e, instanceOf(ElasticsearchStatusException.class)); + assertThat(e.getMessage(), is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service")); + }); - assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service") - ); + service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of(), modelVerificationListener); } } @@ -225,15 +236,14 @@ public void testParseRequestConfig_ThrowsWhenAnExtraKeyExistsInSecretSettingsMap var config = getRequestConfigMap(getServiceSettingsMap("url", "org"), getTaskSettingsMap("model", "user"), secretSettingsMap); - var thrownException = expectThrows( - ElasticsearchStatusException.class, - () -> service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of()) - ); + ActionListener modelVerificationListener = ActionListener.wrap((model) -> { + fail("Expected exception, but got model: " + model); + }, e -> { + assertThat(e, instanceOf(ElasticsearchStatusException.class)); + assertThat(e.getMessage(), is("Model configuration contains settings [{extra_key=value}] unknown to the [openai] service")); + }); - assertThat( - thrownException.getMessage(), - is("Model configuration contains settings [{extra_key=value}] unknown to the 
[openai] service") - ); + service.parseRequestConfig("id", TaskType.TEXT_EMBEDDING, config, Set.of(), modelVerificationListener); } } @@ -244,21 +254,25 @@ public void testParseRequestConfig_CreatesAnOpenAiEmbeddingsModelWithoutUserUrlO new SetOnce<>(createWithEmptySettings(threadPool)) ) ) { - var model = service.parseRequestConfig( + + ActionListener modelVerificationListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); + + var embeddingsModel = (OpenAiEmbeddingsModel) model; + assertNull(embeddingsModel.getServiceSettings().uri()); + assertNull(embeddingsModel.getServiceSettings().organizationId()); + assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); + assertNull(embeddingsModel.getTaskSettings().user()); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); + }, exception -> fail("Unexpected exception: " + exception)); + + service.parseRequestConfig( "id", TaskType.TEXT_EMBEDDING, getRequestConfigMap(getServiceSettingsMap(null, null), getTaskSettingsMap("model", null), getSecretSettingsMap("secret")), - Set.of() + Set.of(), + modelVerificationListener ); - - assertThat(model, instanceOf(OpenAiEmbeddingsModel.class)); - - var embeddingsModel = (OpenAiEmbeddingsModel) model; - assertNull(embeddingsModel.getServiceSettings().uri()); - assertNull(embeddingsModel.getServiceSettings().organizationId()); - assertThat(embeddingsModel.getTaskSettings().modelId(), is("model")); - assertNull(embeddingsModel.getTaskSettings().user()); - assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is("secret")); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/textembedding/MultilingualE5SmallInternalServiceSettingsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/textembedding/MultilingualE5SmallInternalServiceSettingsTests.java new file mode 100644 index 0000000000000..10e34a277eea3 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/textembedding/MultilingualE5SmallInternalServiceSettingsTests.java @@ -0,0 +1,153 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.services.textembedding; + +import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; + +public class MultilingualE5SmallInternalServiceSettingsTests extends AbstractWireSerializingTestCase< + MultilingualE5SmallInternalServiceSettings> { + + public static MultilingualE5SmallInternalServiceSettings createRandom() { + return new MultilingualE5SmallInternalServiceSettings( + randomIntBetween(1, 4), + randomIntBetween(1, 4), + randomFrom(TextEmbeddingInternalService.MULTILINGUAL_E5_SMALL_VALID_IDS) + ); + } + + public void testFromMap_DefaultModelVersion() { + var serviceSettingsBuilder = MultilingualE5SmallInternalServiceSettings.fromMap( + new HashMap<>( + Map.of( + MultilingualE5SmallInternalServiceSettings.NUM_ALLOCATIONS, + 1, + MultilingualE5SmallInternalServiceSettings.NUM_THREADS, + 4 + ) + ) + ); + assertNull(serviceSettingsBuilder.getModelId()); + } + + public void testFromMap() { + String randomModelVariant = randomFrom(TextEmbeddingInternalService.MULTILINGUAL_E5_SMALL_VALID_IDS); + var serviceSettings = MultilingualE5SmallInternalServiceSettings.fromMap( + new HashMap<>( + Map.of( + MultilingualE5SmallInternalServiceSettings.NUM_ALLOCATIONS, + 1, + MultilingualE5SmallInternalServiceSettings.NUM_THREADS, + 4, + MultilingualE5SmallInternalServiceSettings.MODEL_ID, + randomModelVariant + ) + ) + ).build(); + assertEquals(new MultilingualE5SmallInternalServiceSettings(1, 4, randomModelVariant), serviceSettings); + } + + public void testFromMapInvalidVersion() { + String randomModelVariant = randomAlphaOfLength(10); + var e = expectThrows( + ValidationException.class, + () -> MultilingualE5SmallInternalServiceSettings.fromMap( + new HashMap<>( + Map.of( + MultilingualE5SmallInternalServiceSettings.NUM_ALLOCATIONS, + 1, + MultilingualE5SmallInternalServiceSettings.NUM_THREADS, + 4, + "model_id", + randomModelVariant + ) + ) + ) + ); + assertThat(e.getMessage(), containsString("unknown Multilingual-E5-Small model ID [" + randomModelVariant + "]. Valid IDs are [")); + } + + public void testFromMapMissingOptions() { + var e = expectThrows( + ValidationException.class, + () -> MultilingualE5SmallInternalServiceSettings.fromMap( + new HashMap<>(Map.of(MultilingualE5SmallInternalServiceSettings.NUM_ALLOCATIONS, 1)) + ) + ); + + assertThat(e.getMessage(), containsString("[service_settings] does not contain the required setting [num_threads]")); + + e = expectThrows( + ValidationException.class, + () -> MultilingualE5SmallInternalServiceSettings.fromMap( + new HashMap<>(Map.of(MultilingualE5SmallInternalServiceSettings.NUM_THREADS, 1)) + ) + ); + + assertThat(e.getMessage(), containsString("[service_settings] does not contain the required setting [num_allocations]")); + } + + public void testFromMapInvalidSettings() { + var settingsMap = new HashMap( + Map.of( + MultilingualE5SmallInternalServiceSettings.NUM_ALLOCATIONS, + 0, + MultilingualE5SmallInternalServiceSettings.NUM_THREADS, + -1 + ) + ); + var e = expectThrows(ValidationException.class, () -> MultilingualE5SmallInternalServiceSettings.fromMap(settingsMap)); + + assertThat(e.getMessage(), containsString("Invalid value [0]. [num_allocations] must be a positive integer")); + assertThat(e.getMessage(), containsString("Invalid value [-1]. 
[num_threads] must be a positive integer")); + } + + @Override + protected Writeable.Reader instanceReader() { + return MultilingualE5SmallInternalServiceSettings::new; + } + + @Override + protected MultilingualE5SmallInternalServiceSettings createTestInstance() { + return createRandom(); + } + + @Override + protected MultilingualE5SmallInternalServiceSettings mutateInstance(MultilingualE5SmallInternalServiceSettings instance) { + return switch (randomIntBetween(0, 2)) { + case 0 -> new MultilingualE5SmallInternalServiceSettings( + instance.getNumAllocations() + 1, + instance.getNumThreads(), + instance.getModelId() + ); + case 1 -> new MultilingualE5SmallInternalServiceSettings( + instance.getNumAllocations(), + instance.getNumThreads() + 1, + instance.getModelId() + ); + case 2 -> { + var versions = new HashSet<>(TextEmbeddingInternalService.MULTILINGUAL_E5_SMALL_VALID_IDS); + versions.remove(instance.getModelId()); + yield new MultilingualE5SmallInternalServiceSettings( + instance.getNumAllocations(), + instance.getNumThreads(), + versions.iterator().next() + ); + } + default -> throw new IllegalStateException(); + }; + } + +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingInternalServiceTests.java new file mode 100644 index 0000000000000..d8b808bed3336 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/textembedding/TextEmbeddingInternalServiceTests.java @@ -0,0 +1,349 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
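+ *
+ * The service routes on model_id: a recognised multilingual-e5-small ID yields
+ * a MultilingualE5SmallModel, while an unrecognised ID in a persisted config is
+ * treated as a custom eland model and only rejected later by the trained models
+ * API. A minimal sketch of the second case, as tested below (the settings name
+ * is a placeholder):
+ *
+ *   Model parsed = service.parsePersistedConfig(id, TaskType.TEXT_EMBEDDING, settingsWithUnknownId);
+ *   assertThat(parsed, instanceOf(CustomElandModel.class));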
+ * + * This file was contributed to by a Generative AI + */ + +package org.elasticsearch.xpack.inference.services.textembedding; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.inference.InferenceServiceExtension; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.ModelConfigurations; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.inference.services.settings.InternalServiceSettings; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; + +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Mockito.mock; + +public class TextEmbeddingInternalServiceTests extends ESTestCase { + + TaskType taskType = TaskType.TEXT_EMBEDDING; + String randomInferenceEntityId = randomAlphaOfLength(10); + + public void testParseRequestConfig() { + + // Null model variant + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of(TextEmbeddingInternalServiceSettings.NUM_ALLOCATIONS, 1, TextEmbeddingInternalServiceSettings.NUM_THREADS, 4) + ) + ); + + ActionListener modelListener = ActionListener.wrap( + model -> fail("Model parsing should have failed"), + e -> assertThat(e, instanceOf(IllegalArgumentException.class)) + ); + + service.parseRequestConfig(randomInferenceEntityId, taskType, settings, Set.of(), modelListener); + } + + // Valid model variant + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + TextEmbeddingInternalServiceSettings.NUM_ALLOCATIONS, + 1, + TextEmbeddingInternalServiceSettings.NUM_THREADS, + 4, + InternalServiceSettings.MODEL_ID, + TextEmbeddingInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID + ) + ) + ); + + var e5ServiceSettings = new MultilingualE5SmallInternalServiceSettings( + 1, + 4, + TextEmbeddingInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID + ); + + service.parseRequestConfig( + randomInferenceEntityId, + taskType, + settings, + Set.of(), + getModelVerificationActionListener(e5ServiceSettings) + ); + } + + // Invalid config map + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of(TextEmbeddingInternalServiceSettings.NUM_ALLOCATIONS, 1, TextEmbeddingInternalServiceSettings.NUM_THREADS, 4) + ) + ); + settings.put("not_a_valid_config_setting", randomAlphaOfLength(10)); + + ActionListener modelListener = ActionListener.wrap( + model -> fail("Model parsing should have failed"), + e -> assertThat(e, instanceOf(IllegalArgumentException.class)) + ); + + service.parseRequestConfig(randomInferenceEntityId, taskType, settings, Set.of(), modelListener); + } + + // Invalid service settings + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + TextEmbeddingInternalServiceSettings.NUM_ALLOCATIONS, + 1, + TextEmbeddingInternalServiceSettings.NUM_THREADS, + 4, + InternalServiceSettings.MODEL_ID, + TextEmbeddingInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID, // we can't directly test the eland 
case until we mock + // the threadpool within the client + "not_a_valid_service_setting", + randomAlphaOfLength(10) + ) + ) + ); + + ActionListener modelListener = ActionListener.wrap( + model -> fail("Model parsing should have failed"), + e -> assertThat(e, instanceOf(ElasticsearchStatusException.class)) + ); + + service.parseRequestConfig(randomInferenceEntityId, taskType, settings, Set.of(), modelListener); + } + + // Extra service settings + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + TextEmbeddingInternalServiceSettings.NUM_ALLOCATIONS, + 1, + TextEmbeddingInternalServiceSettings.NUM_THREADS, + 4, + InternalServiceSettings.MODEL_ID, + TextEmbeddingInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID, // we can't directly test the eland case until we mock + // the threadpool within the client + "extra_setting_that_should_not_be_here", + randomAlphaOfLength(10) + ) + ) + ); + + ActionListener modelListener = ActionListener.wrap( + model -> fail("Model parsing should have failed"), + e -> assertThat(e, instanceOf(ElasticsearchStatusException.class)) + ); + + service.parseRequestConfig(randomInferenceEntityId, taskType, settings, Set.of(), modelListener); + } + + // Extra settings + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + TextEmbeddingInternalServiceSettings.NUM_ALLOCATIONS, + 1, + TextEmbeddingInternalServiceSettings.NUM_THREADS, + 4, + InternalServiceSettings.MODEL_ID, + TextEmbeddingInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID // we can't directly test the eland case until we mock + // the threadpool within the client + ) + ) + ); + settings.put("extra_setting_that_should_not_be_here", randomAlphaOfLength(10)); + + ActionListener modelListener = ActionListener.wrap( + model -> fail("Model parsing should have failed"), + e -> assertThat(e, instanceOf(ElasticsearchStatusException.class)) + ); + + service.parseRequestConfig(randomInferenceEntityId, taskType, settings, Set.of(), modelListener); + } + } + + private ActionListener getModelVerificationActionListener(MultilingualE5SmallInternalServiceSettings e5ServiceSettings) { + return ActionListener.wrap(model -> { + assertEquals( + new MultilingualE5SmallModel(randomInferenceEntityId, taskType, TextEmbeddingInternalService.NAME, e5ServiceSettings), + model + ); + }, e -> { fail("Model parsing failed " + e.getMessage()); }); + } + + public void testParsePersistedConfig() { + + // Null model variant + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of(TextEmbeddingInternalServiceSettings.NUM_ALLOCATIONS, 1, TextEmbeddingInternalServiceSettings.NUM_THREADS, 4) + ) + ); + + var e5ServiceSettings = new MultilingualE5SmallInternalServiceSettings( + 1, + 4, + TextEmbeddingInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID + ); + + expectThrows(IllegalArgumentException.class, () -> service.parsePersistedConfig(randomInferenceEntityId, taskType, settings)); + + } + + // Invalid model variant + // because this is a persisted config, we assume that the model does exist, even though it doesn't. 
In practice, the trained models + // API would throw an exception when the model is used + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + TextEmbeddingInternalServiceSettings.NUM_ALLOCATIONS, + 1, + TextEmbeddingInternalServiceSettings.NUM_THREADS, + 4, + InternalServiceSettings.MODEL_ID, + "invalid" + ) + ) + ); + + CustomElandModel parsedModel = (CustomElandModel) service.parsePersistedConfig(randomInferenceEntityId, taskType, settings); + var elandServiceSettings = new CustomElandInternalServiceSettings(1, 4, "invalid"); + assertEquals( + new CustomElandModel(randomInferenceEntityId, taskType, TextEmbeddingInternalService.NAME, elandServiceSettings), + parsedModel + ); + } + + // Valid model variant + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + TextEmbeddingInternalServiceSettings.NUM_ALLOCATIONS, + 1, + TextEmbeddingInternalServiceSettings.NUM_THREADS, + 4, + InternalServiceSettings.MODEL_ID, + TextEmbeddingInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID + ) + ) + ); + + var e5ServiceSettings = new MultilingualE5SmallInternalServiceSettings( + 1, + 4, + TextEmbeddingInternalService.MULTILINGUAL_E5_SMALL_MODEL_ID + ); + + MultilingualE5SmallModel parsedModel = (MultilingualE5SmallModel) service.parsePersistedConfig( + randomInferenceEntityId, + taskType, + settings + ); + assertEquals( + new MultilingualE5SmallModel(randomInferenceEntityId, taskType, TextEmbeddingInternalService.NAME, e5ServiceSettings), + parsedModel + ); + } + + // Invalid config map + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of(TextEmbeddingInternalServiceSettings.NUM_ALLOCATIONS, 1, TextEmbeddingInternalServiceSettings.NUM_THREADS, 4) + ) + ); + settings.put("not_a_valid_config_setting", randomAlphaOfLength(10)); + expectThrows(IllegalArgumentException.class, () -> service.parsePersistedConfig(randomInferenceEntityId, taskType, settings)); + } + + // Invalid service settings + { + var service = createService(mock(Client.class)); + var settings = new HashMap(); + settings.put( + ModelConfigurations.SERVICE_SETTINGS, + new HashMap<>( + Map.of( + TextEmbeddingInternalServiceSettings.NUM_ALLOCATIONS, + 1, + TextEmbeddingInternalServiceSettings.NUM_THREADS, + 4, + "not_a_valid_service_setting", + randomAlphaOfLength(10) + ) + ) + ); + expectThrows(IllegalArgumentException.class, () -> service.parsePersistedConfig(randomInferenceEntityId, taskType, settings)); + } + } + + private TextEmbeddingInternalService createService(Client client) { + var context = new InferenceServiceExtension.InferenceServiceFactoryContext(client); + return new TextEmbeddingInternalService(context); + } + + public static Model randomModelConfig(String inferenceEntityId) { + List givenList = Arrays.asList("MultilingualE5SmallModel"); + Random rand = org.elasticsearch.common.Randomness.get(); + String model = givenList.get(rand.nextInt(givenList.size())); + + return switch (model) { + case "MultilingualE5SmallModel" -> new MultilingualE5SmallModel( + inferenceEntityId, + TaskType.TEXT_EMBEDDING, + TextEmbeddingInternalService.NAME, + MultilingualE5SmallInternalServiceSettingsTests.createRandom() + ); + default -> throw new IllegalArgumentException("model " + model + " is not supported 
for testing"); + }; + } + +} From a8070debb5b71e70a63c5d535e5cd32b48743104 Mon Sep 17 00:00:00 2001 From: Fang Xing <155562079+fang-xing-esql@users.noreply.github.com> Date: Tue, 6 Feb 2024 16:04:41 -0500 Subject: [PATCH 088/106] [ES|QL]Allow AUTO_BUCKET to accept references created by EVAL as input to from and to (#104772) Defer auto_bucket foldable verification to LogicalPlanOptimizer --- .../src/main/resources/date.csv-spec | 53 +++++++++++ .../esql/action/CrossClustersEnrichIT.java | 2 +- .../xpack/esql/action/EsqlActionIT.java | 2 +- .../xpack/esql/action/EsqlAsyncActionIT.java | 2 +- .../xpack/esql/ExceptionUtils.java | 1 - .../{analysis => }/VerificationException.java | 5 +- .../xpack/esql/analysis/Analyzer.java | 1 + .../function/scalar/math/AutoBucket.java | 16 +--- .../esql/optimizer/LogicalPlanOptimizer.java | 91 ++++++++++++++++++- .../xpack/esql/analysis/AnalyzerTests.java | 1 + .../xpack/esql/analysis/VerifierTests.java | 1 + .../operator/arithmetic/NegTests.java | 2 +- .../optimizer/LogicalPlanOptimizerTests.java | 25 +++++ .../optimizer/PhysicalPlanOptimizerTests.java | 2 +- .../esql/stats/PlanExecutorMetricsTests.java | 2 +- 15 files changed, 180 insertions(+), 26 deletions(-) rename x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/{analysis => }/VerificationException.java (76%) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index d5aca3d7cea4c..ba1dd8418bbb6 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -326,6 +326,59 @@ COUNT(*):long | bucket:date 1 | 1999-01-01T00:00:00.000Z ; +autoBucketYearInAggConstRefsString#[skip:-8.12.99, reason:date type is supported in 8.13] +FROM employees +| WHERE hire_date >= "1999-01-01T00:00:00Z" +| EVAL bucket_start = "1999-01-01T00:00:00Z" +| EVAL bucket_end = NOW() +| EVAL bucket = AUTO_BUCKET(hire_date, 5, bucket_start, bucket_end) +| STATS COUNT(*) by bucket +| sort bucket; + +COUNT(*):long | bucket:date +1 | 1999-01-01T00:00:00.000Z +; + +autoBucketYearInAggConstRefsConcat#[skip:-8.12.99, reason:date type is supported in 8.13] +FROM employees +| WHERE hire_date >= "1999-01-01T00:00:00Z" +| EVAL bucket_start = CONCAT("1999-01-01", "T", "00:00:00Z") +| EVAL bucket_end = NOW() +| EVAL bucket = AUTO_BUCKET(hire_date, 5, bucket_start, bucket_end) +| STATS COUNT(*) by bucket +| sort bucket; + +COUNT(*):long | bucket:date +1 | 1999-01-01T00:00:00.000Z +; + +autoBucketYearInAggConstRefsDate#[skip:-8.12.99, reason:date type is supported in 8.13] +FROM employees +| WHERE hire_date >= "1999-01-01T00:00:00Z" +| EVAL bucket_start = TO_DATETIME("1999-01-01T00:00:00.000Z") +| EVAL bucket_end = NOW() +| EVAL bucket = AUTO_BUCKET(hire_date, 5, bucket_start, bucket_end) +| keep bucket_start, bucket_end, bucket +| STATS COUNT(*) by bucket +| sort bucket; + +COUNT(*):long | bucket:date +1 | 1999-01-01T00:00:00.000Z +; + +autoBucketYearInAggConstRefsRename#[skip:-8.12.99, reason:date type is supported in 8.13] +FROM employees +| WHERE hire_date >= "1999-01-01T00:00:00Z" +| EVAL bucket_start = "1999-01-01T00:00:00Z" +| EVAL bucket_end = NOW() +| RENAME bucket_end as be, bucket_start as bs +| STATS c = COUNT(*) by AUTO_BUCKET(hire_date, 5, bs, be) +| SORT c +; + +c:long | AUTO_BUCKET(hire_date, 5, bs, be):date +1 | 1999-01-01T00:00:00.000Z +; autoBucketMonthInAgg // tag::auto_bucket_in_agg[] diff --git 
a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java index 59b684cdaa2cf..080f03f77f127 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/CrossClustersEnrichIT.java @@ -35,7 +35,7 @@ import org.elasticsearch.xpack.core.enrich.action.PutEnrichPolicyAction; import org.elasticsearch.xpack.enrich.EnrichPlugin; import org.elasticsearch.xpack.esql.EsqlTestUtils; -import org.elasticsearch.xpack.esql.analysis.VerificationException; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.junit.After; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index c945db951f964..3fa2c86b6ceb9 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -31,7 +31,7 @@ import org.elasticsearch.test.ListMatcher; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.esql.analysis.VerificationException; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java index 689672075fb03..6f6e41ee42b35 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java @@ -24,7 +24,7 @@ import org.elasticsearch.xpack.core.async.GetAsyncResultRequest; import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction; import org.elasticsearch.xpack.esql.TestBlockFactory; -import org.elasticsearch.xpack.esql.analysis.VerificationException; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/ExceptionUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/ExceptionUtils.java index 79e82092e5b79..cb9f86d20915f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/ExceptionUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/ExceptionUtils.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql; -import org.elasticsearch.xpack.esql.analysis.VerificationException; import org.elasticsearch.xpack.ql.tree.Source; public class ExceptionUtils { diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/VerificationException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/VerificationException.java similarity index 76% rename from x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/VerificationException.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/VerificationException.java index 4372401e7d8f3..42080115cf0e2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/VerificationException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/VerificationException.java @@ -5,9 +5,8 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.analysis; +package org.elasticsearch.xpack.esql; -import org.elasticsearch.xpack.esql.EsqlClientException; import org.elasticsearch.xpack.ql.common.Failure; import java.util.Collection; @@ -17,7 +16,7 @@ public VerificationException(String message, Object... args) { super(message, args); } - protected VerificationException(Collection sources) { + public VerificationException(Collection sources) { super(Failure.failMessage(sources)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index dfcfc702fd5cd..e9e523b715ce7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.expression.function.UnsupportedAttribute; import org.elasticsearch.xpack.esql.plan.logical.Drop; import org.elasticsearch.xpack.esql.plan.logical.Enrich; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java index 6a8b3f41a9c65..0a66e82e0d062 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java @@ -199,21 +199,7 @@ private TypeResolution resolveType(BiFunction { + + ReplaceDuplicateAggWithEval() { + super(TransformDirection.UP); + } + + @Override + protected LogicalPlan rule(Aggregate aggregate) { + LogicalPlan plan = aggregate; + + boolean foundDuplicate = false; + var aggs = aggregate.aggregates(); + Map seenAggs = Maps.newMapWithExpectedSize(aggs.size()); + List projections = new ArrayList<>(); + List keptAggs = new ArrayList<>(aggs.size()); + + for (NamedExpression agg : aggs) { + var attr = agg.toAttribute(); + if (agg instanceof Alias as && as.child() instanceof AggregateFunction af) { + var seen = seenAggs.putIfAbsent(af, attr); + if (seen != null) { + foundDuplicate = true; + projections.add(as.replaceChild(seen)); + } + // otherwise keep the agg in place + else { + keptAggs.add(agg); + projections.add(attr); + } + } else { + keptAggs.add(agg); + projections.add(attr); + } + } + + // at least one duplicate found - add the projection (to keep the output in place) + if (foundDuplicate) { + var source = 
aggregate.source(); + var newAggregate = new Aggregate(source, aggregate.child(), aggregate.groupings(), keptAggs); + plan = new Project(source, newAggregate, projections); + } + + return plan; + } + } + + /** + * Verify that a {@link LogicalPlan} can be executed. + * + * @param plan The logical plan to be verified. + * @throws VerificationException if the plan is invalid. + */ + LogicalPlan verifyOptimized(LogicalPlan plan) throws VerificationException { + Set failures = new LinkedHashSet<>(); + plan.forEachUp(p -> { + p.forEachExpression(AutoBucket.class, e -> { + Expression.TypeResolution resolution = isFoldable(e.from(), e.sourceText(), THIRD); + if (resolution.unresolved()) { + failures.add(fail(e, resolution.message())); + } + resolution = isFoldable(e.to(), e.sourceText(), FOURTH); + if (resolution.unresolved()) { + failures.add(fail(e, resolution.message())); + } + }); + }); + if (failures.isEmpty() == false) { + throw new VerificationException(failures); + } + + return plan; + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index a1f1aae7e6e25..1e173e52d6ac6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 1257cc5ee8bd6..0e075af55fd8f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.analysis; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.parser.TypedParamValue; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java index 7066b3ee31631..f5e5e9f406f22 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegTests.java @@ -11,7 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.xpack.esql.analysis.VerificationException; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import 
org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 6cbb629a6843a..a0b186621a1dd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.TestBlockFactory; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils; @@ -2921,6 +2922,30 @@ public void testNestedMultiExpressionsInGroupingAndAggs() { assertThat(Expressions.names(agg.output()), contains("count(salary + 1)", "max(salary + 23)", "languages + 1", "emp_no % 3")); } + public void testLogicalPlanOptimizerVerifier() { + var plan = plan(""" + from test + | eval bucket_start = 1, bucket_end = 100000 + | eval auto_bucket(salary, 10, bucket_start, bucket_end) + """); + var ab = as(plan, Eval.class); + assertTrue(ab.optimized()); + } + + public void testLogicalPlanOptimizerVerificationException() { + VerificationException e = expectThrows(VerificationException.class, () -> plan(""" + from test + | eval bucket_end = 100000 + | eval auto_bucket(salary, 10, emp_no, bucket_end) + """)); + assertTrue(e.getMessage().startsWith("Found ")); + final String header = "Found 1 problem\nline "; + assertEquals( + "3:8: third argument of [auto_bucket(salary, 10, emp_no, bucket_end)] must be a constant, received [emp_no]", + e.getMessage().substring(header.length()) + ); + } + /** * Expects * Project[[x{r}#5]] diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 066d6ef5afc04..1d4136216057e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -24,10 +24,10 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; -import org.elasticsearch.xpack.esql.analysis.VerificationException; import org.elasticsearch.xpack.esql.enrich.ResolvedEnrichPolicy; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java index c9e9a2b75ebab..06eae5d57cf16 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/stats/PlanExecutorMetricsTests.java @@ -15,9 +15,9 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; -import org.elasticsearch.xpack.esql.analysis.VerificationException; import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolver; import org.elasticsearch.xpack.esql.execution.PlanExecutor; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; From bb016bdbe9b3db79d54a25c1aea5e5c9b4ed7260 Mon Sep 17 00:00:00 2001 From: Jonathan Buttner <56361221+jonathan-buttner@users.noreply.github.com> Date: Tue, 6 Feb 2024 16:21:23 -0500 Subject: [PATCH 089/106] [ML] Inference service should reject tasks during shutdown (#105213) * Fixing inference shutdown bug * Update docs/changelog/105213.yaml --------- Co-authored-by: Elastic Machine --- docs/changelog/105213.yaml | 5 ++ .../http/sender/RequestExecutorService.java | 2 + .../sender/RequestExecutorServiceTests.java | 70 ++++++++++++++++--- 3 files changed, 66 insertions(+), 11 deletions(-) create mode 100644 docs/changelog/105213.yaml diff --git a/docs/changelog/105213.yaml b/docs/changelog/105213.yaml new file mode 100644 index 0000000000000..40595a8166ef2 --- /dev/null +++ b/docs/changelog/105213.yaml @@ -0,0 +1,5 @@ +pr: 105213 +summary: Inference service should reject tasks during shutdown +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java index 47b4d49b8f46e..f844787455290 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorService.java @@ -186,8 +186,10 @@ private void handleTasks() throws InterruptedException { command.run(); } + // TODO add logic to complete pending items in the queue before shutting down if (running.get() == false) { logger.debug(() -> format("Http executor service [%s] exiting", serviceName)); + rejectTaskBecauseOfShutdown(task); } else { executeTask(task); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java index a4282bbef058d..ef8731746e187 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestExecutorServiceTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.xpack.inference.external.request.HttpRequestTests; import org.junit.After; import org.junit.Before; +import org.mockito.ArgumentCaptor; import java.io.IOException; import java.util.concurrent.BlockingQueue; @@ -93,16 +94,18 @@ public void testIsTerminated_IsTrue() 
throws InterruptedException { public void testIsTerminated_AfterStopFromSeparateThread() throws Exception { var waitToShutdown = new CountDownLatch(1); + var waitToReturnFromSend = new CountDownLatch(1); var mockHttpClient = mock(HttpClient.class); doAnswer(invocation -> { waitToShutdown.countDown(); + waitToReturnFromSend.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); return Void.TYPE; }).when(mockHttpClient).send(any(), any(), any()); var service = createRequestExecutorService(mockHttpClient, null); - Future executorTermination = submitShutdownRequest(waitToShutdown, service); + Future executorTermination = submitShutdownRequest(waitToShutdown, waitToReturnFromSend, service); PlainActionFuture listener = new PlainActionFuture<>(); service.execute(HttpRequestTests.createMock("inferenceEntityId"), null, listener); @@ -277,9 +280,43 @@ public void testQueueTake_ThrowingInterruptedException_TerminatesService() throw verify(queue, times(1)).take(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105155") + public void testQueueTake_RejectsTask_WhenServiceShutsDown() throws Exception { + var mockTask = mock(AbstractRunnable.class); + @SuppressWarnings("unchecked") + BlockingQueue queue = mock(LinkedBlockingQueue.class); + + var service = new RequestExecutorService( + "test_service", + mock(HttpClient.class), + threadPool, + mockQueueCreator(queue), + null, + createRequestExecutorServiceSettingsEmpty() + ); + + doAnswer(invocation -> { + service.shutdown(); + return mockTask; + }).doReturn(new NoopTask()).when(queue).take(); + + service.start(); + + assertTrue(service.isTerminated()); + verify(queue, times(1)).take(); + + ArgumentCaptor argument = ArgumentCaptor.forClass(Exception.class); + verify(mockTask, times(1)).onRejection(argument.capture()); + assertThat(argument.getValue(), instanceOf(EsRejectedExecutionException.class)); + assertThat( + argument.getValue().getMessage(), + is("Failed to send request, queue service [test_service] has shutdown prior to executing request") + ); + + var rejectionException = (EsRejectedExecutionException) argument.getValue(); + assertTrue(rejectionException.isExecutorShutdown()); + } + public void testChangingCapacity_SetsCapacityToTwo() throws ExecutionException, InterruptedException, TimeoutException, IOException { - var waitToShutdown = new CountDownLatch(1); var httpClient = mock(HttpClient.class); var settings = createRequestExecutorServiceSettings(1); @@ -299,13 +336,16 @@ public void testChangingCapacity_SetsCapacityToTwo() throws ExecutionException, settings.setQueueCapacity(2); + var waitToShutdown = new CountDownLatch(1); + var waitToReturnFromSend = new CountDownLatch(1); // There is a request already queued, and its execution path will initiate shutting down the service doAnswer(invocation -> { waitToShutdown.countDown(); + waitToReturnFromSend.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); return Void.TYPE; }).when(httpClient).send(any(), any(), any()); - Future executorTermination = submitShutdownRequest(waitToShutdown, service); + Future executorTermination = submitShutdownRequest(waitToShutdown, waitToReturnFromSend, service); service.start(); @@ -314,10 +354,8 @@ public void testChangingCapacity_SetsCapacityToTwo() throws ExecutionException, assertThat(service.remainingQueueCapacity(), is(2)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105155") public void testChangingCapacity_DoesNotRejectsOverflowTasks_BecauseOfQueueFull() throws IOException, ExecutionException, InterruptedException, 
TimeoutException { - var waitToShutdown = new CountDownLatch(1); var httpClient = mock(HttpClient.class); var settings = createRequestExecutorServiceSettings(3); @@ -332,13 +370,16 @@ public void testChangingCapacity_DoesNotRejectsOverflowTasks_BecauseOfQueueFull( settings.setQueueCapacity(1); + var waitToShutdown = new CountDownLatch(1); + var waitToReturnFromSend = new CountDownLatch(1); // There is a request already queued, and its execution path will initiate shutting down the service doAnswer(invocation -> { waitToShutdown.countDown(); + waitToReturnFromSend.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); return Void.TYPE; }).when(httpClient).send(any(), any(), any()); - Future executorTermination = submitShutdownRequest(waitToShutdown, service); + Future executorTermination = submitShutdownRequest(waitToShutdown, waitToReturnFromSend, service); service.start(); @@ -358,10 +399,8 @@ public void testChangingCapacity_DoesNotRejectsOverflowTasks_BecauseOfQueueFull( assertTrue(thrownException.isExecutorShutdown()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105155") public void testChangingCapacity_ToZero_SetsQueueCapacityToUnbounded() throws IOException, ExecutionException, InterruptedException, TimeoutException { - var waitToShutdown = new CountDownLatch(1); var httpClient = mock(HttpClient.class); var settings = createRequestExecutorServiceSettings(1); @@ -381,13 +420,16 @@ public void testChangingCapacity_ToZero_SetsQueueCapacityToUnbounded() throws IO settings.setQueueCapacity(0); + var waitToShutdown = new CountDownLatch(1); + var waitToReturnFromSend = new CountDownLatch(1); // There is a request already queued, and its execution path will initiate shutting down the service doAnswer(invocation -> { waitToShutdown.countDown(); + waitToReturnFromSend.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); return Void.TYPE; }).when(httpClient).send(any(), any(), any()); - Future executorTermination = submitShutdownRequest(waitToShutdown, service); + Future executorTermination = submitShutdownRequest(waitToShutdown, waitToReturnFromSend, service); service.start(); @@ -396,12 +438,18 @@ public void testChangingCapacity_ToZero_SetsQueueCapacityToUnbounded() throws IO assertThat(service.remainingQueueCapacity(), is(Integer.MAX_VALUE)); } - private Future submitShutdownRequest(CountDownLatch waitToShutdown, RequestExecutorService service) { + private Future submitShutdownRequest( + CountDownLatch waitToShutdown, + CountDownLatch waitToReturnFromSend, + RequestExecutorService service + ) { return threadPool.generic().submit(() -> { try { // wait for a task to be added to be executed before beginning shutdown waitToShutdown.await(TIMEOUT.getSeconds(), TimeUnit.SECONDS); service.shutdown(); + // tells send to return + waitToReturnFromSend.countDown(); service.awaitTermination(TIMEOUT.getSeconds(), TimeUnit.SECONDS); } catch (Exception e) { fail(Strings.format("Failed to shutdown executor: %s", e)); From 6b6fb71cf307bbcffb2b78e590f5420c6d3cf5b1 Mon Sep 17 00:00:00 2001 From: James Baiera Date: Tue, 6 Feb 2024 16:31:03 -0500 Subject: [PATCH 090/106] Remove non-portable newline from test (#105209) Co-authored-by: Elastic Machine --- .../action/bulk/FailureStoreDocumentTests.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentTests.java b/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentTests.java index 92fa67e9a6ffc..962c796e18c2a 100644 --- 
a/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/FailureStoreDocumentTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.xcontent.ObjectPath; import org.elasticsearch.xcontent.json.JsonXContent; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; @@ -58,10 +59,11 @@ public void testFailureStoreDocumentConverstion() throws Exception { assertThat(ObjectPath.eval("error.message", convertedRequest.sourceAsMap()), is(equalTo("Test exception please ignore"))); assertThat( ObjectPath.eval("error.stack_trace", convertedRequest.sourceAsMap()), - startsWith( - "org.elasticsearch.ElasticsearchException: Test exception please ignore\n" - + "\tat org.elasticsearch.action.bulk.FailureStoreDocumentTests.testFailureStoreDocumentConverstion" - ) + startsWith("org.elasticsearch.ElasticsearchException: Test exception please ignore") + ); + assertThat( + ObjectPath.eval("error.stack_trace", convertedRequest.sourceAsMap()), + containsString("at org.elasticsearch.action.bulk.FailureStoreDocumentTests.testFailureStoreDocumentConverstion") ); assertThat(convertedRequest.isWriteToFailureStore(), is(true)); From 7c039b172836b90a77099ca5ba79115dca979b0e Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 6 Feb 2024 16:42:01 -0800 Subject: [PATCH 091/106] AwaitsFix more tests for #104838 --- .../elasticsearch/xpack/search/AsyncSearchResponseTests.java | 2 ++ .../xpack/search/RestSubmitAsyncSearchActionTests.java | 2 ++ 2 files changed, 4 insertions(+) diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchResponseTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchResponseTests.java index f3d6f352db186..afabd8c7a7bc3 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchResponseTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchResponseTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.search; import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.TransportVersion; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchResponse; @@ -47,6 +48,7 @@ import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.core.async.GetAsyncResultRequestTests.randomSearchId; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104838") public class AsyncSearchResponseTests extends ESTestCase { private final SearchResponse searchResponse = randomSearchResponse(randomBoolean()); private NamedWriteableRegistry namedWriteableRegistry; diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java index 0130746ab1702..c319ed99c1841 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchActionTests.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.search; +import org.apache.lucene.tests.util.LuceneTestCase; 
import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -29,6 +30,7 @@ import static org.hamcrest.Matchers.instanceOf; import static org.mockito.Mockito.mock; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104838") public class RestSubmitAsyncSearchActionTests extends RestActionTestCase { private RestSubmitAsyncSearchAction action; From 5afe81cd7523ded2c2d71cafd4490ee84f40ac7f Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 7 Feb 2024 07:34:20 +0100 Subject: [PATCH 092/106] Optimize SearchHit#resolveLookupFields a little (#105222) No need to copy the keyset here to do iteration + mutation. Just update the map entries directly to save a few cycles. --- .../org/elasticsearch/search/SearchHit.java | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index dde044bf15115..5bee2d4a557b2 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -55,6 +55,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -578,25 +579,24 @@ public void resolveLookupFields(Map> lookupResults) { if (lookupResults.isEmpty()) { return; } - final List fields = new ArrayList<>(documentFields.keySet()); - for (String field : fields) { - documentFields.computeIfPresent(field, (k, docField) -> { - if (docField.getLookupFields().isEmpty()) { - return docField; - } - final List newValues = new ArrayList<>(docField.getValues()); - for (LookupField lookupField : docField.getLookupFields()) { - final List resolvedValues = lookupResults.get(lookupField); - if (resolvedValues != null) { - newValues.addAll(resolvedValues); - } - } - if (newValues.isEmpty() && docField.getIgnoredValues().isEmpty()) { - return null; - } else { - return new DocumentField(docField.getName(), newValues, docField.getIgnoredValues()); + for (Iterator> iterator = documentFields.entrySet().iterator(); iterator.hasNext();) { + Map.Entry entry = iterator.next(); + final DocumentField docField = entry.getValue(); + if (docField.getLookupFields().isEmpty()) { + continue; + } + final List newValues = new ArrayList<>(docField.getValues()); + for (LookupField lookupField : docField.getLookupFields()) { + final List resolvedValues = lookupResults.get(lookupField); + if (resolvedValues != null) { + newValues.addAll(resolvedValues); } - }); + } + if (newValues.isEmpty() && docField.getIgnoredValues().isEmpty()) { + iterator.remove(); + } else { + entry.setValue(new DocumentField(docField.getName(), newValues, docField.getIgnoredValues())); + } } assert hasLookupFields() == false : "Some lookup fields are not resolved"; } From 334aa1bc8dac0ad108b172c8aaa76893777baf19 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Wed, 7 Feb 2024 08:49:39 +0100 Subject: [PATCH 093/106] Add support for fetching user profileId in Query Users (#104923) Add support for fetching user profileId in Query Users --- .../rest-api/security/query-user.asciidoc | 47 ++++ .../api/security.query_user.json | 7 + .../action/user/QueryUserRequest.java | 12 +- .../action/user/QueryUserResponse.java | 5 +- 
.../action/user/QueryUserRequestTests.java | 9 +- .../xpack/security/QueryUserIT.java | 129 ++++++--- .../action/user/TransportQueryUserAction.java | 79 +++++- .../authc/esnative/NativeUsersStore.java | 25 +- .../rest/action/user/RestQueryUserAction.java | 6 +- .../user/TransportQueryUserActionTests.java | 251 ++++++++++++++++++ .../rest-api-spec/test/users/40_query.yml | 13 + 11 files changed, 532 insertions(+), 51 deletions(-) diff --git a/docs/reference/rest-api/security/query-user.asciidoc b/docs/reference/rest-api/security/query-user.asciidoc index 08ead0f389ee9..fae5b9914a05e 100644 --- a/docs/reference/rest-api/security/query-user.asciidoc +++ b/docs/reference/rest-api/security/query-user.asciidoc @@ -62,6 +62,13 @@ The email of the user. `enabled`:: Specifies whether the user is enabled. +[[security-api-query-user-query-params]] +==== {api-query-parms-title} + +`with_profile_uid`:: +(Optional, boolean) Determines whether to retrieve the <> `uid`, +if exists, for the users. Defaults to `false`. + ==== include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=from] @@ -218,6 +225,46 @@ A successful call returns a JSON structure for a user: -------------------------------------------------- // NOTCONSOLE +To retrieve the user `profile_uid` as part of the response: + +[source,console] +-------------------------------------------------- +GET /_security/_query/user?with_profile_uid=true +{ + "query": { + "prefix": { + "roles": "other" + } + } +} +-------------------------------------------------- +// TEST[setup:jacknich_user] + +[source,console-result] +-------------------------------------------------- +{ + "total": 1, + "count": 1, + "users": [ + { + "username": "jacknich", + "roles": [ + "admin", + "other_role1" + ], + "full_name": "Jack Nicholson", + "email": "jacknich@example.com", + "metadata": { + "intelligence": 7 + }, + "enabled": true, + "profile_uid": "u_79HkWkwmnBH5gqFKwoxggWPjEBOur1zLPXQPEl1VBW0_0" + } + ] +} +-------------------------------------------------- +// NOTCONSOLE + Use a `bool` query to issue complex logical conditions and use `from`, `size`, `sort` to help paginate the result: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_user.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_user.json index 6d76126ba81c4..9793e424e6f52 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_user.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.query_user.json @@ -28,6 +28,13 @@ "body": { "description": "From, size, query, sort and search_after", "required": false + }, + "params": { + "with_profile_uid": { + "type": "boolean", + "default": false, + "description": "flag to retrieve profile uid (if exists) associated with the user" + } } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequest.java index 6db7e93b66eda..c5f804d39020a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequest.java @@ -38,12 +38,14 @@ public final class QueryUserRequest extends ActionRequest { @Nullable private final SearchAfterBuilder searchAfterBuilder; + private final boolean withProfileUid; + public QueryUserRequest() { this(null); } public QueryUserRequest(QueryBuilder 
queryBuilder) { - this(queryBuilder, null, null, null, null); + this(queryBuilder, null, null, null, null, false); } public QueryUserRequest( @@ -51,13 +53,15 @@ public QueryUserRequest( @Nullable Integer from, @Nullable Integer size, @Nullable List fieldSortBuilders, - @Nullable SearchAfterBuilder searchAfterBuilder + @Nullable SearchAfterBuilder searchAfterBuilder, + boolean withProfileUid ) { this.queryBuilder = queryBuilder; this.from = from; this.size = size; this.fieldSortBuilders = fieldSortBuilders; this.searchAfterBuilder = searchAfterBuilder; + this.withProfileUid = withProfileUid; } public QueryBuilder getQueryBuilder() { @@ -96,4 +100,8 @@ public ActionRequestValidationException validate() { public void writeTo(StreamOutput out) throws IOException { TransportAction.localOnly(); } + + public boolean isWithProfileUid() { + return withProfileUid; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserResponse.java index 57d156cf05ca0..0d44d5d64a4a6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/QueryUserResponse.java @@ -68,7 +68,7 @@ public void writeTo(StreamOutput out) throws IOException { TransportAction.localOnly(); } - public record Item(User user, @Nullable Object[] sortValues) implements ToXContentObject { + public record Item(User user, @Nullable Object[] sortValues, @Nullable String profileUid) implements ToXContentObject { @Override public Object[] sortValues() { @@ -82,6 +82,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (sortValues != null && sortValues.length > 0) { builder.array("_sort", sortValues); } + if (profileUid != null) { + builder.field("profile_uid", profileUid); + } builder.endObject(); return builder; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequestTests.java index e7d8ef0b65e39..481fccc4be575 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/user/QueryUserRequestTests.java @@ -18,7 +18,8 @@ public void testValidate() { randomIntBetween(0, Integer.MAX_VALUE), randomIntBetween(0, Integer.MAX_VALUE), null, - null + null, + false ); assertThat(request1.validate(), nullValue()); @@ -27,7 +28,8 @@ public void testValidate() { randomIntBetween(Integer.MIN_VALUE, -1), randomIntBetween(0, Integer.MAX_VALUE), null, - null + null, + false ); assertThat(request2.validate().getMessage(), containsString("[from] parameter cannot be negative")); @@ -36,7 +38,8 @@ public void testValidate() { randomIntBetween(0, Integer.MAX_VALUE), randomIntBetween(Integer.MIN_VALUE, -1), null, - null + null, + false ); assertThat(request3.validate().getMessage(), containsString("[size] parameter cannot be negative")); } diff --git a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java index 1bd3e9ed927fe..0d217d201731c 100644 
--- a/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java +++ b/x-pack/plugin/security/qa/security-basic/src/javaRestTest/java/org/elasticsearch/xpack/security/QueryUserIT.java @@ -12,6 +12,7 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Strings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.user.User; @@ -20,6 +21,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -47,15 +49,22 @@ public class QueryUserIT extends SecurityInBasicRestTestCase { ); private Request queryUserRequestWithAuth() { - final Request request = new Request(randomFrom("POST", "GET"), "/_security/_query/user"); + return queryUserRequestWithAuth(false); + } + + private Request queryUserRequestWithAuth(boolean withProfileId) { + final Request request = new Request( + randomFrom("POST", "GET"), + "/_security/_query/user" + (withProfileId ? "?with_profile_uid=true" : randomFrom("", "?with_profile_uid=false")) + ); request.setOptions(request.getOptions().toBuilder().addHeader(HttpHeaders.AUTHORIZATION, READ_USERS_USER_AUTH_HEADER)); return request; } public void testQuery() throws IOException { + boolean withProfileId = randomBoolean(); // No users to match yet - assertQuery("", users -> assertThat(users, empty())); - + assertQuery("", users -> assertThat(users, empty()), withProfileId); int randomUserCount = createRandomUsers().size(); // An empty request body means search for all users (page size = 10) @@ -65,7 +74,8 @@ public void testQuery() throws IOException { assertQuery( String.format(""" {"query":{"match_all":{}},"from":0,"size":%s}""", randomUserCount), - users -> assertThat(users.size(), equalTo(randomUserCount)) + users -> assertThat(users.size(), equalTo(randomUserCount)), + withProfileId ); // Exists query @@ -73,7 +83,8 @@ public void testQuery() throws IOException { assertQuery( String.format(""" {"query":{"exists":{"field":"%s"}},"from":0,"size":%s}""", field, randomUserCount), - users -> assertEquals(users.size(), randomUserCount) + users -> assertEquals(users.size(), randomUserCount), + withProfileId ); // Prefix search @@ -93,28 +104,41 @@ public void testQuery() throws IOException { Map.of(), true ); + // Extract map to be able to assert on profile id (not part of User model) + Map prefixUser1Map; + Map prefixUser2Map; + if (withProfileId) { + prefixUser1Map = userToMap(prefixUser1, doActivateProfile(prefixUser1.principal(), "100%-security-guaranteed")); + prefixUser2Map = userToMap(prefixUser2, doActivateProfile(prefixUser2.principal(), "100%-security-guaranteed")); + assertTrue(prefixUser1Map.containsKey("profile_uid")); + assertTrue(prefixUser2Map.containsKey("profile_uid")); + } else { + prefixUser1Map = userToMap(prefixUser1); + prefixUser2Map = userToMap(prefixUser2); + } + assertQuery(""" {"query":{"bool":{"must":[{"prefix":{"roles":"master-of-the"}}]}},"sort":["username"]}""", returnedUsers -> { assertThat(returnedUsers, hasSize(2)); - assertUser(prefixUser1, returnedUsers.get(0)); - assertUser(prefixUser2, returnedUsers.get(1)); - }); + assertUser(prefixUser1Map, returnedUsers.get(0)); + assertUser(prefixUser2Map, returnedUsers.get(1)); + }, withProfileId); // Wildcard search assertQuery(""" { 
"query": { "wildcard": {"username": "mr-prefix*"} },"sort":["username"]}""", users -> { assertThat(users.size(), equalTo(2)); - assertUser(prefixUser1, users.get(0)); - assertUser(prefixUser2, users.get(1)); - }); + assertUser(prefixUser1Map, users.get(0)); + assertUser(prefixUser2Map, users.get(1)); + }, withProfileId); // Terms query assertQuery(""" {"query":{"terms":{"roles":["some-other-role"]}},"sort":["username"]}""", users -> { assertThat(users.size(), equalTo(2)); - assertUser(prefixUser1, users.get(0)); - assertUser(prefixUser2, users.get(1)); - }); + assertUser(prefixUser1Map, users.get(0)); + assertUser(prefixUser2Map, users.get(1)); + }, withProfileId); // Test other fields User otherFieldsTestUser = createUser( @@ -136,17 +160,27 @@ public void testQuery() throws IOException { users -> assertThat( users.stream().map(u -> u.get(User.Fields.USERNAME.getPreferredName()).toString()).toList(), hasItem("batman-official-user") - ) + ), + withProfileId ); - + Map otherFieldsTestUserMap; + if (withProfileId) { + otherFieldsTestUserMap = userToMap( + otherFieldsTestUser, + doActivateProfile(otherFieldsTestUser.principal(), "100%-security-guaranteed") + ); + assertTrue(otherFieldsTestUserMap.containsKey("profile_uid")); + } else { + otherFieldsTestUserMap = userToMap(otherFieldsTestUser); + } // Test complex query assertQuery(""" { "query": {"bool": {"must": [ {"wildcard": {"username": "batman-official*"}}, {"term": {"enabled": true}}],"filter": [{"prefix": {"roles": "bat-cave"}}]}}}""", users -> { assertThat(users.size(), equalTo(1)); - assertUser(otherFieldsTestUser, users.get(0)); - }); + assertUser(otherFieldsTestUserMap, users.get(0)); + }, withProfileId); // Search for fields outside the allowlist fails assertQueryError(400, """ @@ -223,7 +257,7 @@ public void testPagination() throws IOException { assertUsers(users, allUserInfos, sortField, from); // size can be zero, but total should still reflect the number of users matched - final Request request = queryUserRequestWithAuth(); + final Request request = queryUserRequestWithAuth(false); request.setJsonEntity("{\"size\":0}"); final Response response = client().performRequest(request); assertOK(response); @@ -348,7 +382,11 @@ private void assertQueryError(String authHeader, int statusCode, String body, St } private void assertQuery(String body, Consumer>> userVerifier) throws IOException { - final Request request = queryUserRequestWithAuth(); + assertQuery(body, userVerifier, false); + } + + private void assertQuery(String body, Consumer>> userVerifier, boolean withProfileId) throws IOException { + final Request request = queryUserRequestWithAuth(withProfileId); request.setJsonEntity(body); final Response response = client().performRequest(request); assertOK(response); @@ -369,6 +407,8 @@ private void assertUser(Map expectedUser, Map ac ((List) expectedUser.get(User.Fields.ROLES.getPreferredName())).toArray(), ((List) actualUser.get(User.Fields.ROLES.getPreferredName())).toArray() ); + assertEquals(expectedUser.getOrDefault("profile_uid", null), actualUser.getOrDefault("profile_uid", null)); + assertEquals(expectedUser.get(User.Fields.FULL_NAME.getPreferredName()), actualUser.get(User.Fields.FULL_NAME.getPreferredName())); assertEquals(expectedUser.get(User.Fields.EMAIL.getPreferredName()), actualUser.get(User.Fields.EMAIL.getPreferredName())); assertEquals(expectedUser.get(User.Fields.METADATA.getPreferredName()), actualUser.get(User.Fields.METADATA.getPreferredName())); @@ -376,20 +416,21 @@ private void assertUser(Map 
expectedUser, Map ac } private Map userToMap(User user) { - return Map.of( - User.Fields.USERNAME.getPreferredName(), - user.principal(), - User.Fields.ROLES.getPreferredName(), - Arrays.stream(user.roles()).toList(), - User.Fields.FULL_NAME.getPreferredName(), - user.fullName(), - User.Fields.EMAIL.getPreferredName(), - user.email(), - User.Fields.METADATA.getPreferredName(), - user.metadata(), - User.Fields.ENABLED.getPreferredName(), - user.enabled() - ); + return userToMap(user, null); + } + + private Map userToMap(User user, @Nullable String profileId) { + Map userMap = new HashMap<>(); + userMap.put(User.Fields.USERNAME.getPreferredName(), user.principal()); + userMap.put(User.Fields.ROLES.getPreferredName(), Arrays.stream(user.roles()).toList()); + userMap.put(User.Fields.FULL_NAME.getPreferredName(), user.fullName()); + userMap.put(User.Fields.EMAIL.getPreferredName(), user.email()); + userMap.put(User.Fields.METADATA.getPreferredName(), user.metadata()); + userMap.put(User.Fields.ENABLED.getPreferredName(), user.enabled()); + if (profileId != null) { + userMap.put("profile_uid", profileId); + } + return userMap; } private void assertUsers(List expectedUsers, List> actualUsers, String sortField, int from) { @@ -423,6 +464,26 @@ public static Map randomUserMetadata() { ); } + private String doActivateProfile(String username, String password) { + final Request activateProfileRequest = new Request("POST", "_security/profile/_activate"); + activateProfileRequest.setJsonEntity(org.elasticsearch.common.Strings.format(""" + { + "grant_type": "password", + "username": "%s", + "password": "%s" + }""", username, password)); + + final Response activateProfileResponse; + + try { + activateProfileResponse = adminClient().performRequest(activateProfileRequest); + assertOK(activateProfileResponse); + return responseAsMap(activateProfileResponse).get("uid").toString(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + private List createRandomUsers() throws IOException { int randomUserCount = randomIntBetween(8, 15); final List users = new ArrayList<>(randomUserCount); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java index 2a9aef73ff62a..c5a8795779f08 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserAction.java @@ -7,11 +7,14 @@ package org.elasticsearch.xpack.security.action.user; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.tasks.Task; @@ -19,24 +22,47 @@ import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.core.security.action.user.QueryUserRequest; import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; +import 
org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.Subject; +import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; +import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; +import org.elasticsearch.xpack.security.profile.ProfileService; import org.elasticsearch.xpack.security.support.UserBoolQueryBuilder; import java.util.List; import java.util.Locale; +import java.util.Map; +import java.util.Objects; import java.util.Set; +import java.util.stream.Collectors; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_MAIN_ALIAS; import static org.elasticsearch.xpack.security.support.UserBoolQueryBuilder.USER_FIELD_NAME_TRANSLATOR; public final class TransportQueryUserAction extends TransportAction { private final NativeUsersStore usersStore; + private final ProfileService profileService; + private final Authentication.RealmRef nativeRealmRef; private static final Set FIELD_NAMES_WITH_SORT_SUPPORT = Set.of("username", "roles", "enabled"); @Inject - public TransportQueryUserAction(TransportService transportService, ActionFilters actionFilters, NativeUsersStore usersStore) { + public TransportQueryUserAction( + TransportService transportService, + ActionFilters actionFilters, + NativeUsersStore usersStore, + ProfileService profileService, + Realms realms + ) { super(ActionTypes.QUERY_USER_ACTION.name(), actionFilters, transportService.getTaskManager()); this.usersStore = usersStore; + this.profileService = profileService; + this.nativeRealmRef = realms.getRealmRefs() + .values() + .stream() + .filter(realmRef -> NativeRealmSettings.TYPE.equals(realmRef.getType())) + .findFirst() + .orElseThrow(() -> new IllegalStateException("native realm realm ref not found")); } @Override @@ -64,7 +90,56 @@ protected void doExecute(Task task, QueryUserRequest request, ActionListener { + if (request.isWithProfileUid()) { + resolveProfileUids(queryUserResults, listener); + } else { + List queryUserResponseResults = queryUserResults.userQueryResult() + .stream() + .map(queryUserResult -> new QueryUserResponse.Item(queryUserResult.user(), queryUserResult.sortValues(), null)) + .toList(); + listener.onResponse(new QueryUserResponse(queryUserResults.total(), queryUserResponseResults)); + } + }, listener::onFailure)); + } + + private void resolveProfileUids(NativeUsersStore.QueryUserResults queryUserResults, ActionListener listener) { + final List subjects = queryUserResults.userQueryResult() + .stream() + .map(item -> new Subject(item.user(), nativeRealmRef)) + .toList(); + + profileService.searchProfilesForSubjects(subjects, ActionListener.wrap(resultsAndErrors -> { + if (resultsAndErrors == null || resultsAndErrors.errors().isEmpty()) { + final Map profileUidLookup = resultsAndErrors == null + ? 
Map.of() + : resultsAndErrors.results() + .stream() + .filter(t -> Objects.nonNull(t.v2())) + .map(t -> new Tuple<>(t.v1().getUser().principal(), t.v2().uid())) + .collect(Collectors.toUnmodifiableMap(Tuple::v1, Tuple::v2)); + + List queryUserResponseResults = queryUserResults.userQueryResult() + .stream() + .map( + userResult -> new QueryUserResponse.Item( + userResult.user(), + userResult.sortValues(), + profileUidLookup.getOrDefault(userResult.user().principal(), null) + ) + ) + .toList(); + listener.onResponse(new QueryUserResponse(queryUserResults.total(), queryUserResponseResults)); + } else { + final ElasticsearchStatusException exception = new ElasticsearchStatusException( + "failed to retrieve profile for users. please retry without fetching profile uid (with_profile_uid=false)", + RestStatus.INTERNAL_SERVER_ERROR + ); + resultsAndErrors.errors().values().forEach(exception::addSuppressed); + listener.onFailure(exception); + } + }, listener::onFailure)); } // package private for testing diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index 81aa487f73e2c..9cd1963a1dda0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -44,7 +44,6 @@ import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; import org.elasticsearch.xpack.core.security.action.user.DeleteUserRequest; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; -import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm; import org.elasticsearch.xpack.core.security.authc.support.Hasher; @@ -164,11 +163,11 @@ public void getUsers(String[] userNames, final ActionListener> } } - public void queryUsers(SearchRequest searchRequest, ActionListener listener) { + public void queryUsers(SearchRequest searchRequest, ActionListener listener) { final SecurityIndexManager frozenSecurityIndex = securityIndex.defensiveCopy(); if (frozenSecurityIndex.indexExists() == false) { logger.debug("security index does not exist"); - listener.onResponse(QueryUserResponse.emptyResponse()); + listener.onResponse(QueryUserResults.EMPTY); } else if (frozenSecurityIndex.isAvailable(SEARCH_SHARDS) == false) { listener.onFailure(frozenSecurityIndex.getUnavailableReason(SEARCH_SHARDS)); } else { @@ -183,15 +182,15 @@ public void queryUsers(SearchRequest searchRequest, ActionListener userItem = Arrays.stream(searchResponse.getHits().getHits()).map(hit -> { + final List userItems = Arrays.stream(searchResponse.getHits().getHits()).map(hit -> { UserAndPassword userAndPassword = transformUser(hit.getId(), hit.getSourceAsMap()); - return userAndPassword != null ? new QueryUserResponse.Item(userAndPassword.user(), hit.getSortValues()) : null; + return userAndPassword != null ? 
new QueryUserResult(userAndPassword.user(), hit.getSortValues()) : null; }).filter(Objects::nonNull).toList(); - listener.onResponse(new QueryUserResponse(total, userItem)); + listener.onResponse(new QueryUserResults(userItems, total)); }, listener::onFailure) ) ); @@ -839,4 +838,16 @@ static ReservedUserInfo defaultDisabledUserInfo() { return new ReservedUserInfo(new char[0], false); } } + + /** + * Result record for every document matching a user + */ + public record QueryUserResult(User user, Object[] sortValues) {} + + /** + * Total result for a Query User query + */ + public record QueryUserResults(List userQueryResult, long total) { + public static final QueryUserResults EMPTY = new QueryUserResults(List.of(), 0); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserAction.java index 407fe36fa82d3..a97243c55d59a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserAction.java @@ -89,6 +89,7 @@ public String getName() { @Override protected RestChannelConsumer innerPrepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final boolean withProfileUid = request.paramAsBoolean("with_profile_uid", false); final QueryUserRequest queryUserRequest; if (request.hasContentOrSourceParam()) { final Payload payload = PARSER.parse(request.contentOrSourceParamParser(), null); @@ -97,10 +98,11 @@ protected RestChannelConsumer innerPrepareRequest(final RestRequest request, fin payload.from, payload.size, payload.fieldSortBuilders, - payload.searchAfterBuilder + payload.searchAfterBuilder, + withProfileUid ); } else { - queryUserRequest = new QueryUserRequest(null, null, null, null, null); + queryUserRequest = new QueryUserRequest(null, null, null, null, null, withProfileUid); } return channel -> client.execute(ActionTypes.QUERY_USER_ACTION, queryUserRequest, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java index aa5f935998757..1c14da149cbd3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportQueryUserActionTests.java @@ -7,18 +7,62 @@ package org.elasticsearch.xpack.security.action.user; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.NestedSortBuilder; import org.elasticsearch.search.sort.SortMode; import org.elasticsearch.search.sort.SortOrder; +import 
org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.profile.Profile; +import org.elasticsearch.xpack.core.security.action.user.QueryUserRequest; +import org.elasticsearch.xpack.core.security.action.user.QueryUserResponse; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.RealmConfig; +import org.elasticsearch.xpack.core.security.authc.Subject; +import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; +import org.elasticsearch.xpack.core.security.user.User; +import org.elasticsearch.xpack.security.authc.Realms; +import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; +import org.elasticsearch.xpack.security.profile.ProfileService; +import org.mockito.ArgumentMatchers; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Locale; +import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; import java.util.stream.IntStream; +import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.anyList; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class TransportQueryUserActionTests extends ESTestCase { private static final String[] allowedIndexFieldNames = new String[] { "username", "roles", "enabled" }; @@ -68,6 +112,213 @@ public void testNestedSortingOnTextFieldsNotAllowed() { assertThat(e.getMessage(), equalTo(String.format(Locale.ROOT, "sorting is not supported for field [%s] in User query", fieldName))); } + public void testQueryUsers() { + final List storeUsers = randomFrom( + Collections.singletonList(new User("joe")), + Arrays.asList(new User("jane"), new User("fred")), + randomUsers() + ); + final boolean profileIndexExists = randomBoolean(); + NativeUsersStore usersStore = mock(NativeUsersStore.class); + + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + mock(ThreadPool.class), + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + TransportQueryUserAction action = new TransportQueryUserAction( + transportService, + mock(ActionFilters.class), + usersStore, + mockProfileService(false, profileIndexExists), + mockRealms() + ); + boolean withProfileUid = randomBoolean(); + QueryUserRequest request = new QueryUserRequest(null, null, null, null, null, withProfileUid); + + doAnswer(invocation -> { + Object[] args = invocation.getArguments(); + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) args[1]; + + listener.onResponse( + new NativeUsersStore.QueryUserResults( + storeUsers.stream().map(user -> new NativeUsersStore.QueryUserResult(user, null)).toList(), + storeUsers.size() + ) + ); + 
return null; + }).when(usersStore).queryUsers(ArgumentMatchers.any(SearchRequest.class), anyActionListener()); + + final AtomicReference throwableRef = new AtomicReference<>(); + final AtomicReference responseRef = new AtomicReference<>(); + action.doExecute(mock(Task.class), request, new ActionListener<>() { + @Override + public void onResponse(QueryUserResponse response) { + responseRef.set(response); + } + + @Override + public void onFailure(Exception e) { + throwableRef.set(e); + } + }); + + assertThat(throwableRef.get(), is(nullValue())); + assertThat(responseRef.get(), is(notNullValue())); + assertEquals(responseRef.get().getItems().length, storeUsers.size()); + + if (profileIndexExists && withProfileUid) { + assertEquals( + storeUsers.stream().map(user -> "u_profile_" + user.principal()).toList(), + Arrays.stream(responseRef.get().getItems()).map(QueryUserResponse.Item::profileUid).toList() + ); + } else { + for (QueryUserResponse.Item item : responseRef.get().getItems()) { + assertThat(item.profileUid(), nullValue()); + } + } + } + + public void testQueryUsersWithProfileUidException() { + final List storeUsers = randomFrom( + Collections.singletonList(new User("joe")), + Arrays.asList(new User("jane"), new User("fred")), + randomUsers() + ); + NativeUsersStore usersStore = mock(NativeUsersStore.class); + + TransportService transportService = new TransportService( + Settings.EMPTY, + mock(Transport.class), + mock(ThreadPool.class), + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + x -> null, + null, + Collections.emptySet() + ); + + TransportQueryUserAction action = new TransportQueryUserAction( + transportService, + mock(ActionFilters.class), + usersStore, + mockProfileService(true, true), + mockRealms() + ); + + QueryUserRequest request = new QueryUserRequest(null, null, null, null, null, true); + + doAnswer(invocation -> { + Object[] args = invocation.getArguments(); + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) args[1]; + + listener.onResponse( + new NativeUsersStore.QueryUserResults( + storeUsers.stream().map(user -> new NativeUsersStore.QueryUserResult(user, null)).toList(), + storeUsers.size() + ) + ); + return null; + }).when(usersStore).queryUsers(ArgumentMatchers.any(SearchRequest.class), anyActionListener()); + + final PlainActionFuture future = new PlainActionFuture<>(); + action.doExecute(mock(Task.class), request, future); + + final ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, future::actionGet); + + assertThat(e.status(), equalTo(RestStatus.INTERNAL_SERVER_ERROR)); + assertThat(e.getSuppressed().length, greaterThan(0)); + Arrays.stream(e.getSuppressed()).forEach(suppressed -> { + assertThat(suppressed, instanceOf(ElasticsearchException.class)); + assertThat(suppressed.getMessage(), equalTo("something is not right")); + }); + } + + private List randomUsers() { + int size = scaledRandomIntBetween(3, 16); + List users = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + users.add(new User("user_" + i, randomAlphaOfLengthBetween(4, 12))); + } + return users; + } + + private Profile profileFromSubject(Subject subject) { + final User user = subject.getUser(); + final Authentication.RealmRef realmRef = subject.getRealm(); + return new Profile( + "u_profile_" + user.principal(), + randomBoolean(), + randomNonNegativeLong(), + new Profile.ProfileUser( + user.principal(), + Arrays.asList(user.roles()), + realmRef.getName(), + realmRef.getDomain() == null ? 
null : realmRef.getDomain().name(), + user.email(), + user.fullName() + ), + Map.of(), + Map.of(), + new Profile.VersionControl(randomNonNegativeLong(), randomNonNegativeLong()) + ); + } + + private ProfileService mockProfileService(boolean throwException, boolean profileIndexExists) { + final ProfileService profileService = mock(ProfileService.class); + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + final var listener = (ActionListener>) invocation.getArguments()[1]; + if (false == profileIndexExists) { + listener.onResponse(null); + return null; + } + @SuppressWarnings("unchecked") + final List subjects = (List) invocation.getArguments()[0]; + List> results = subjects.stream() + .map(subject -> new Tuple<>(subject, profileFromSubject(subject))) + .toList(); + + final Map errors = new HashMap<>(); + if (throwException) { + assertThat("random exception requires non-empty results", results, not(empty())); + final int exceptionSize = randomIntBetween(1, results.size()); + errors.putAll( + results.subList(0, exceptionSize) + .stream() + .collect(Collectors.toUnmodifiableMap(Tuple::v1, t -> new ElasticsearchException("something is not right"))) + ); + results = results.subList(exceptionSize - 1, results.size()); + } + + listener.onResponse(new ProfileService.SubjectSearchResultsAndErrors<>(results, errors)); + return null; + }).when(profileService).searchProfilesForSubjects(anyList(), anyActionListener()); + return profileService; + } + + private Realms mockRealms() { + final Realms realms = mock(Realms.class); + when(realms.getRealmRefs()).thenReturn( + Map.of( + new RealmConfig.RealmIdentifier(NativeRealmSettings.TYPE, NativeRealmSettings.DEFAULT_NAME), + new Authentication.RealmRef( + NativeRealmSettings.DEFAULT_NAME, + NativeRealmSettings.TYPE, + randomAlphaOfLengthBetween(3, 8), + null + ) + ) + ); + return realms; + } + private FieldSortBuilder randomFieldSortBuilderWithName(String name) { final FieldSortBuilder fieldSortBuilder = new FieldSortBuilder(name); fieldSortBuilder.order(randomBoolean() ? 
SortOrder.ASC : SortOrder.DESC); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/users/40_query.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/users/40_query.yml index d6258a96650e9..cc74b1dfde229 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/users/40_query.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/users/40_query.yml @@ -63,6 +63,17 @@ teardown: } - match: { "created": true } + - do: + security.activate_user_profile: + body: > + { + "grant_type": "password", + "username": "test_user_1", + "password" : "x-pack-test-password" + } + - is_true: uid + - set: { uid: profile_uid_1 } + - do: headers: Authorization: "Basic dXNlcnNfYWRtaW46eC1wYWNrLXRlc3QtcGFzc3dvcmQ=" # users_admin @@ -120,6 +131,8 @@ teardown: "from": 1, "size": 1 } + with_profile_uid: true - match: { total: 2 } - match: { count: 1 } - match: { users.0.username: "test_user_1" } + - match: { users.0.profile_uid: "$profile_uid_1" } From 335a75b3efa6dfdbe57d94b717a55f59db676cb6 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Wed, 7 Feb 2024 08:52:10 +0100 Subject: [PATCH 094/106] Fix exception handling on DateFormatters.forPattern() with wrong date pattern (#105048) --- docs/changelog/105048.yaml | 6 ++++++ .../org/elasticsearch/common/time/DateFormatters.java | 3 ++- .../elasticsearch/common/time/DateFormattersTests.java | 8 ++++++++ .../expression/function/scalar/date/DateParseTests.java | 3 +-- 4 files changed, 17 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/105048.yaml diff --git a/docs/changelog/105048.yaml b/docs/changelog/105048.yaml new file mode 100644 index 0000000000000..d865f447a0a93 --- /dev/null +++ b/docs/changelog/105048.yaml @@ -0,0 +1,6 @@ +pr: 105048 +summary: "ES|QL: Fix exception handling on `date_parse` with wrong date pattern" +area: ES|QL +type: bug +issues: + - 104124 diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java index dc73dc77c71af..34d583ed7e732 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -2125,7 +2125,8 @@ static DateFormatter forPattern(String input) { input, new DateTimeFormatterBuilder().appendPattern(input).toFormatter(Locale.ROOT).withResolverStyle(ResolverStyle.STRICT) ); - } catch (IllegalArgumentException e) { + } catch (IllegalArgumentException | ClassCastException e) { + // ClassCastException catches this bug https://bugs.openjdk.org/browse/JDK-8193877 throw new IllegalArgumentException("Invalid format: [" + input + "]: " + e.getMessage(), e); } } diff --git a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java index 7ceebab711f35..f00697a3ae870 100644 --- a/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java +++ b/server/src/test/java/org/elasticsearch/common/time/DateFormattersTests.java @@ -1371,4 +1371,12 @@ public void testParsingMissingTimezone() { long millisJoda = DateFormatter.forPattern("yyyy-MM-dd HH:mm:ss").parseMillis("2018-02-18 17:47:17"); assertThat(millisJava, is(millisJoda)); } + + // see https://bugs.openjdk.org/browse/JDK-8193877 + public void testNoClassCastException() { + String input = "DpNKOGqhjZ"; + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> 
DateFormatter.forPattern(input)); + assertThat(e.getCause(), instanceOf(ClassCastException.class)); + assertThat(e.getMessage(), containsString(input)); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java index ae53f2e81d158..540d1aa34474b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseTests.java @@ -103,9 +103,8 @@ public static Iterable parameters() { ); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/104124") public void testInvalidPattern() { - String pattern = randomAlphaOfLength(10); + String pattern = "invalid"; DriverContext driverContext = driverContext(); InvalidArgumentException e = expectThrows( InvalidArgumentException.class, From aaadc301115a9167b69e8c31e5e2dcfdbcbfae54 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Wed, 7 Feb 2024 03:16:38 -0500 Subject: [PATCH 095/106] Forward port release notes for v8.12.1 (#105218) --- docs/reference/release-notes.asciidoc | 2 + docs/reference/release-notes/8.12.1.asciidoc | 73 +++++++++++++++++++ .../release-notes/highlights.asciidoc | 14 ++-- 3 files changed, 83 insertions(+), 6 deletions(-) create mode 100644 docs/reference/release-notes/8.12.1.asciidoc diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index 1aebf005a64e3..669402c94e9bb 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -7,6 +7,7 @@ This section summarizes the changes in each release. * <> +* <> * <> * <> * <> @@ -60,6 +61,7 @@ This section summarizes the changes in each release. -- include::release-notes/8.13.0.asciidoc[] +include::release-notes/8.12.1.asciidoc[] include::release-notes/8.12.0.asciidoc[] include::release-notes/8.11.4.asciidoc[] include::release-notes/8.11.3.asciidoc[] diff --git a/docs/reference/release-notes/8.12.1.asciidoc b/docs/reference/release-notes/8.12.1.asciidoc new file mode 100644 index 0000000000000..9aa9a11b3bf02 --- /dev/null +++ b/docs/reference/release-notes/8.12.1.asciidoc @@ -0,0 +1,73 @@ +[[release-notes-8.12.1]] +== {es} version 8.12.1 + +Also see <>. 
+ +[[bug-8.12.1]] +[float] +=== Bug fixes + +Allocation:: +* Improve `CANNOT_REBALANCE_CAN_ALLOCATE` explanation {es-pull}104904[#104904] + +Application:: +* [Connector API] Fix bug in configuration validation parser {es-pull}104198[#104198] +* [Connector API] Fix bug when triggering a sync job via API {es-pull}104802[#104802] +* [Profiling] Query in parallel on content nodes {es-pull}104600[#104600] + +Data streams:: +* Data streams fix failure store delete {es-pull}104281[#104281] +* Fix _alias/ returning non-matching data streams {es-pull}104145[#104145] (issue: {es-issue}96589[#96589]) + +Downsampling:: +* Downsampling supports `date_histogram` with tz {es-pull}103511[#103511] (issue: {es-issue}101309[#101309]) + +ES|QL:: +* Avoid execute ESQL planning on refresh thread {es-pull}104591[#104591] +* ESQL: Allow grouping by null blocks {es-pull}104523[#104523] +* ESQL: Fix `SearchStats#count(String)` to count values not rows {es-pull}104891[#104891] (issue: {es-issue}104795[#104795]) +* Limit concurrent shards per node for ESQL {es-pull}104832[#104832] (issue: {es-issue}103666[#103666]) +* Reduce the number of Evals `ReplaceMissingFieldWithNull` creates {es-pull}104586[#104586] (issue: {es-issue}104583[#104583]) + +Infra/Resiliency:: +* Limit nesting depth in Exception XContent {es-pull}103741[#103741] + +Ingest Node:: +* Better handling of async processor failures {es-pull}104289[#104289] (issue: {es-issue}101921[#101921]) +* Ingest correctly handle upsert operations and drop processors together {es-pull}104585[#104585] (issue: {es-issue}36746[#36746]) + +Machine Learning:: +* Add retry logic for 500 and 503 errors for OpenAI {es-pull}103819[#103819] +* Avoid possible datafeed infinite loop with filtering aggregations {es-pull}104722[#104722] (issue: {es-issue}104699[#104699]) +* [LTR] `FieldValueExtrator` - Checking if fetched values is empty {es-pull}104314[#104314] + +Network:: +* Fix lost headers with chunked responses {es-pull}104808[#104808] + +Search:: +* Don't throw error for remote shards that open PIT filtered out {es-pull}104288[#104288] (issue: {es-issue}102596[#102596]) + +Snapshot/Restore:: +* Fix deleting index during snapshot finalization {es-pull}103817[#103817] (issue: {es-issue}101029[#101029]) + +TSDB:: +* Fix `routing_path` when template has multiple `path_match` and multi-fields {es-pull}104418[#104418] (issue: {es-issue}104400[#104400]) + +Transform:: +* Fix bug when `latest` transform is used together with `from` parameter {es-pull}104606[#104606] (issue: {es-issue}104543[#104543]) + +[[deprecation-8.12.1]] +[float] +=== Deprecations + +Machine Learning:: +* Deprecate machine learning on Intel macOS {es-pull}104087[#104087] + +[[upgrade-8.12.1]] +[float] +=== Upgrades + +Search:: +* [8.12.1] Upgrade to Lucene 9.9.2 {es-pull}104761[#104761] (issue: {es-issue}104617[#104617]) + + diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index f5252ae6a884f..0452eca8fbfc9 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -27,13 +27,15 @@ Other versions: endif::[] -// The notable-highlights tag marks entries that -// should be featured in the Stack Installation and Upgrade Guide: // tag::notable-highlights[] -// [discrete] -// === Heading -// -// Description. + +[discrete] +[[ga_release_of_synonyms_api]] +=== GA Release of Synonyms API +Removes the beta label for the Synonyms API to make it GA. 
+ +{es-pull}103223[#103223] + // end::notable-highlights[] From 3b7b86c507f159180938951e22e611f594325abd Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 7 Feb 2024 08:53:02 +0000 Subject: [PATCH 096/106] Simplify `ChunkedToXContentHelper#singleChunk` (#105225) There's no need for this helper to take more than one argument. Almost all the usages only passed in a single argument, and the few cases that supplied more than one can be rewritten as a single argument to save allocating all those extra lambdas. --- .../elasticsearch/xcontent/XContentBuilder.java | 2 +- .../allocation/DesiredBalanceResponse.java | 17 ++++++----------- .../admin/cluster/node/stats/NodeStats.java | 9 +++++---- .../org/elasticsearch/cluster/ClusterInfo.java | 16 ++++++++-------- .../xcontent/ChunkedToXContentHelper.java | 16 ++++++---------- .../cluster/node/stats/NodeStatsTests.java | 4 ++-- 6 files changed, 28 insertions(+), 36 deletions(-) diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilder.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilder.java index d63c61eea876c..41512af0f79d4 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilder.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilder.java @@ -976,7 +976,7 @@ public XContentBuilder value(Map map) throws IOException { return map(map); } - private XContentBuilder value(ToXContent value, ToXContent.Params params) throws IOException { + public XContentBuilder value(ToXContent value, ToXContent.Params params) throws IOException { if (value == null) { return nullValue(); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponse.java index 0b5f5fc023dc0..54187ababccdf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponse.java @@ -88,10 +88,10 @@ public void writeTo(StreamOutput out) throws IOException { public Iterator toXContentChunked(ToXContent.Params params) { return Iterators.concat( singleChunk( - (builder, p) -> builder.startObject(), - (builder, p) -> builder.field("stats", stats), - (builder, p) -> builder.field("cluster_balance_stats", clusterBalanceStats), - (builder, p) -> builder.startObject("routing_table") + (builder, p) -> builder.startObject() + .field("stats", stats) + .field("cluster_balance_stats", clusterBalanceStats) + .startObject("routing_table") ), Iterators.flatMap( routingTable.entrySet().iterator(), @@ -172,14 +172,9 @@ public void writeTo(StreamOutput out) throws IOException { @Override public Iterator toXContentChunked(ToXContent.Params params) { return Iterators.concat( - singleChunk((builder, p) -> builder.startObject(), (builder, p) -> builder.startArray("current")), + singleChunk((builder, p) -> builder.startObject().startArray("current")), current().iterator(), - singleChunk( - (builder, p) -> builder.endArray(), - (builder, p) -> builder.field("desired"), - desired, - (builder, p) -> builder.endObject() - ) + singleChunk((builder, p) -> builder.endArray().field("desired").value(desired, p).endObject()) ); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java index 
cdb9191bd8d70..595e441e9b2cf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStats.java @@ -304,7 +304,7 @@ public Iterator toXContentChunked(ToXContent.Params outerP return Iterators.concat( - Iterators.single((builder, params) -> { + singleChunk((builder, params) -> { builder.field("name", getNode().getName()); builder.field("transport_address", getNode().getAddress().toString()); builder.field("host", getNode().getHostName()); @@ -329,7 +329,9 @@ public Iterator toXContentChunked(ToXContent.Params outerP ifPresent(getIndices()).toXContentChunked(outerParams), - singleChunk(ifPresent(getOs()), ifPresent(getProcess()), ifPresent(getJvm())), + singleChunk( + (builder, p) -> builder.value(ifPresent(getOs()), p).value(ifPresent(getProcess()), p).value(ifPresent(getJvm()), p) + ), ifPresent(getThreadPool()).toXContentChunked(outerParams), singleChunk(ifPresent(getFs())), @@ -341,8 +343,7 @@ public Iterator toXContentChunked(ToXContent.Params outerP ifPresent(getIngestStats()).toXContentChunked(outerParams), singleChunk(ifPresent(getAdaptiveSelectionStats())), ifPresent(getScriptCacheStats()).toXContentChunked(outerParams), - singleChunk(ifPresent(getIndexingPressureStats())), - singleChunk(ifPresent(getRepositoriesStats())) + singleChunk((builder, p) -> builder.value(ifPresent(getIndexingPressureStats()), p).value(ifPresent(getRepositoriesStats()), p)) ); } diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java b/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java index 0f83e6f2d8e19..f817298e4e328 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterInfo.java @@ -163,8 +163,8 @@ public Iterator toXContentChunked(ToXContent.Params params return builder; }), singleChunk( - (builder, p) -> builder.endObject(), // end "nodes" - (builder, p) -> builder.startObject("shard_sizes") + (builder, p) -> builder.endObject() // end "nodes" + .startObject("shard_sizes") ), Iterators.map( @@ -172,8 +172,8 @@ public Iterator toXContentChunked(ToXContent.Params params c -> (builder, p) -> builder.humanReadableField(c.getKey() + "_bytes", c.getKey(), ByteSizeValue.ofBytes(c.getValue())) ), singleChunk( - (builder, p) -> builder.endObject(), // end "shard_sizes" - (builder, p) -> builder.startObject("shard_data_set_sizes") + (builder, p) -> builder.endObject() // end "shard_sizes" + .startObject("shard_data_set_sizes") ), Iterators.map( shardDataSetSizes.entrySet().iterator(), @@ -184,13 +184,13 @@ public Iterator toXContentChunked(ToXContent.Params params ) ), singleChunk( - (builder, p) -> builder.endObject(), // end "shard_data_set_sizes" - (builder, p) -> builder.startObject("shard_paths") + (builder, p) -> builder.endObject() // end "shard_data_set_sizes" + .startObject("shard_paths") ), Iterators.map(dataPath.entrySet().iterator(), c -> (builder, p) -> builder.field(c.getKey().toString(), c.getValue())), singleChunk( - (builder, p) -> builder.endObject(), // end "shard_paths" - (builder, p) -> builder.startArray("reserved_sizes") + (builder, p) -> builder.endObject() // end "shard_paths" + .startArray("reserved_sizes") ), Iterators.map(reservedSpace.entrySet().iterator(), c -> (builder, p) -> { builder.startObject(); diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java 
b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java index 4eaf9b5636623..fbc506f600097 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/ChunkedToXContentHelper.java @@ -115,17 +115,13 @@ public static Iterator map(String name, Map map, Func } /** - * Creates an Iterator of a single ToXContent object that serializes all the given 'contents' ToXContent objects into a single chunk. + * Creates an Iterator of a single ToXContent object that serializes the given object as a single chunk. Just wraps {@link + * Iterators#single}, but still useful because it avoids any type ambiguity. * - * @param contents ToXContent objects supporting toXContent() calls. - * @return Iterator of a single ToXContent object serializing all the ToXContent "contents". + * @param item Item to wrap + * @return Singleton iterator for the given item. */ - public static Iterator singleChunk(ToXContent... contents) { - return Iterators.single((builder, params) -> { - for (ToXContent content : contents) { - content.toXContent(builder, params); - } - return builder; - }); + public static Iterator singleChunk(ToXContent item) { + return Iterators.single(item); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index e65d99c64ae5e..0290bfb9c236f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -491,8 +491,8 @@ public void testChunking() { } private static int expectedChunks(NodeStats nodeStats, NodeStatsLevel level) { - // expectedChunks = number of static chunks (8 at the moment, see NodeStats#toXContentChunked) + number of variable chunks - return 8 + expectedChunks(nodeStats.getHttp()) // + return 7 // number of static chunks, see NodeStats#toXContentChunked + + expectedChunks(nodeStats.getHttp()) // + expectedChunks(nodeStats.getIndices(), level) // + expectedChunks(nodeStats.getTransport()) // + expectedChunks(nodeStats.getIngestStats()) // From 0fefd5b881325d214946008ab0d007dd69041f0b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?= Date: Wed, 7 Feb 2024 10:00:39 +0100 Subject: [PATCH 097/106] Validate settings before reloading AD/LDAP bind password (#105133) This is a followup to https://github.com/elastic/elasticsearch/pull/104320 which adds validation during secure setting reload of a `bind_password`. The reload of `secure_bind_password` will now fail with an exception instead of logging a deprecation warning. 
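To make the new behavior concrete, here is a minimal sketch (a hypothetical snippet, not part of this patch; `realm` stands for any AD/LDAP realm configured with a `bind_dn` and an initial secure bind password, mirroring the tests added below):

    // Before this change, reloading without the secure bind password only logged
    // a deprecation warning and left the realm with incomplete credentials.
    // Now the reload fails fast, pointing at the offending setting keys:
    var e = expectThrows(SettingsException.class, () -> realm.reload(Settings.EMPTY));
    // message: "[...bind_dn] is set but no bind password is specified. ...
    //           Specify a bind password via [...secure_bind_password]."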
--- .../authc/ldap/PoolingSessionFactory.java | 53 +++++-- .../authc/ldap/ActiveDirectoryRealmTests.java | 16 +++ .../authc/ldap/LdapRealmReloadTests.java | 131 +++++++++++------- 3 files changed, 136 insertions(+), 64 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/PoolingSessionFactory.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/PoolingSessionFactory.java index 24bdb9243aef7..4f87ac27be141 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/PoolingSessionFactory.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/PoolingSessionFactory.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.core.CharArrays; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; @@ -73,7 +74,7 @@ abstract class PoolingSessionFactory extends SessionFactory implements Releasabl super(config, sslService, threadPool); this.groupResolver = groupResolver; this.bindDn = bindDn; - this.bindRequest = new AtomicReference<>(buildBindRequest(config.settings())); + this.bindRequest = new AtomicReference<>(buildBindRequest(config.settings(), false)); this.useConnectionPool = config.getSetting(poolingEnabled); if (useConnectionPool) { this.connectionPool = createConnectionPool(config, serverSet, timeout, logger, bindRequest.get(), healthCheckDNSupplier); @@ -82,7 +83,21 @@ abstract class PoolingSessionFactory extends SessionFactory implements Releasabl } } - private SimpleBindRequest buildBindRequest(Settings settings) { + /** + * Builds a bind request that is used to authenticate users in LDAP user search mode. The returned {@link SimpleBindRequest} will hold + * the configured bind DN and password. In case the bind DN and password are not configured, then this method will return a simple + * bind request that will perform an anonymous bind. + *

+     * This method can be called during initialization of the session factory as well as during the reloading of secure settings.
+     * This is controlled with the {@code reloadRequest} parameter. If {@code reloadRequest} is set to {@code true}, this method
+     * will perform a setting consistency validation and throw {@link SettingsException} in case of violation.
+     * Due to legacy reasons and BWC, when {@code reloadRequest} is set to {@code false}, this method will only log a warning message.
+     *
+     * @param reloadRequest {@code true} if this method is called during reloading of secure settings,
+     *                      {@code false} if it is called during bootstrapping.
+     * @return A new {@link SimpleBindRequest} that contains the configured bind DN and password.
+     */
+    private SimpleBindRequest buildBindRequest(Settings settings, boolean reloadRequest) {
         final byte[] bindPassword;
         final Setting legacyPasswordSetting = config.getConcreteSetting(LEGACY_BIND_PASSWORD);
         final Setting securePasswordSetting = config.getConcreteSetting(SECURE_BIND_PASSWORD);
@@ -104,17 +119,27 @@ private SimpleBindRequest buildBindRequest(Settings settings) {
             return new SimpleBindRequest();
         } else {
             if (bindPassword == null) {
-                deprecationLogger.critical(
-                    DeprecationCategory.SECURITY,
-                    "bind_dn_set_without_password",
-                    "[{}] is set but no bind password is specified. Without a corresponding bind password, "
-                        + "all {} realm authentication will fail. Specify a bind password via [{}] or [{}]. "
-                        + "In the next major release, nodes with incomplete bind credentials will fail to start.",
-                    RealmSettings.getFullSettingKey(config, PoolingSessionFactorySettings.BIND_DN),
-                    config.type(),
-                    RealmSettings.getFullSettingKey(config, SECURE_BIND_PASSWORD),
-                    RealmSettings.getFullSettingKey(config, LEGACY_BIND_PASSWORD)
-                );
+                if (reloadRequest) {
+                    throw new SettingsException(
+                        "[{}] is set but no bind password is specified. Without a corresponding bind password, "
+                            + "all {} realm authentication will fail. Specify a bind password via [{}].",
+                        RealmSettings.getFullSettingKey(config, PoolingSessionFactorySettings.BIND_DN),
+                        config.type(),
+                        RealmSettings.getFullSettingKey(config, SECURE_BIND_PASSWORD)
+                    );
+                } else {
+                    deprecationLogger.critical(
+                        DeprecationCategory.SECURITY,
+                        "bind_dn_set_without_password",
+                        "[{}] is set but no bind password is specified. Without a corresponding bind password, "
+                            + "all {} realm authentication will fail. Specify a bind password via [{}] or [{}]. 
" + + "In the next major release, nodes with incomplete bind credentials will fail to start.", + RealmSettings.getFullSettingKey(config, PoolingSessionFactorySettings.BIND_DN), + config.type(), + RealmSettings.getFullSettingKey(config, SECURE_BIND_PASSWORD), + RealmSettings.getFullSettingKey(config, LEGACY_BIND_PASSWORD) + ); + } } return new SimpleBindRequest(this.bindDn, bindPassword); } @@ -123,7 +148,7 @@ private SimpleBindRequest buildBindRequest(Settings settings) { @Override public void reload(Settings settings) { final SimpleBindRequest oldRequest = bindRequest.get(); - final SimpleBindRequest newRequest = buildBindRequest(settings); + final SimpleBindRequest newRequest = buildBindRequest(settings, true); if (bindRequestEquals(newRequest, oldRequest) == false) { if (bindRequest.compareAndSet(oldRequest, newRequest)) { if (connectionPool != null) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java index 2fb8a69ec9601..caafd1b919e88 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java @@ -624,6 +624,22 @@ public void testReloadBindPassword() throws Exception { final AuthenticationResult result = future.actionGet(); assertThat(result.toString(), result.getStatus(), is(AuthenticationResult.Status.SUCCESS)); } + + // Verify that reloading fails if password gets removed when bind dn is configured. + var e = expectThrows(Exception.class, () -> realm.reload(Settings.EMPTY)); + assertThat( + e.getMessage(), + containsString( + "[" + + getFullSettingKey(config, PoolingSessionFactorySettings.BIND_DN) + + "] is set but no bind password is specified. Without a corresponding bind password, " + + "all " + + realm.type() + + " realm authentication will fail. Specify a bind password via [" + + getFullSettingKey(config, PoolingSessionFactorySettings.SECURE_BIND_PASSWORD) + + "]." 
+ ) + ); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmReloadTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmReloadTests.java index cf62b8355644b..f91e845976412 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmReloadTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapRealmReloadTests.java @@ -50,6 +50,7 @@ import static org.elasticsearch.xpack.core.security.authc.RealmSettings.getFullSettingKey; import static org.elasticsearch.xpack.core.security.authc.ldap.support.SessionFactorySettings.URLS_SETTING; import static org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings.VERIFICATION_MODE_SETTING_REALM; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; @@ -101,17 +102,9 @@ private RealmConfig getRealmConfig(RealmConfig.RealmIdentifier identifier, Setti } public void testReloadWithoutConnectionPool() throws Exception { - final boolean useLegacyBindSetting = randomBoolean(); - final Settings bindPasswordSettings; - if (useLegacyBindSetting) { - bindPasswordSettings = Settings.builder() - .put(getFullSettingKey(REALM_IDENTIFIER, PoolingSessionFactorySettings.LEGACY_BIND_PASSWORD), INITIAL_BIND_PASSWORD) - .build(); - } else { - bindPasswordSettings = Settings.builder() - .setSecureSettings(secureSettings(PoolingSessionFactorySettings.SECURE_BIND_PASSWORD, INITIAL_BIND_PASSWORD)) - .build(); - } + final Settings bindPasswordSettings = Settings.builder() + .setSecureSettings(secureSettings(PoolingSessionFactorySettings.SECURE_BIND_PASSWORD, INITIAL_BIND_PASSWORD)) + .build(); final Settings settings = Settings.builder() .put(getFullSettingKey(REALM_IDENTIFIER.getName(), LdapUserSearchSessionFactorySettings.POOL_ENABLED), false) .putList(getFullSettingKey(REALM_IDENTIFIER, URLS_SETTING), ldapUrls()) @@ -131,45 +124,23 @@ public void testReloadWithoutConnectionPool() throws Exception { // Generate new password and reload only on ES side final String newBindPassword = randomAlphaOfLengthBetween(5, 10); - final Settings updatedBindPasswordSettings; - if (useLegacyBindSetting) { - updatedBindPasswordSettings = Settings.builder() - .put(getFullSettingKey(REALM_IDENTIFIER, PoolingSessionFactorySettings.LEGACY_BIND_PASSWORD), newBindPassword) - .build(); - } else { - updatedBindPasswordSettings = Settings.builder() - .setSecureSettings(secureSettings(PoolingSessionFactorySettings.SECURE_BIND_PASSWORD, newBindPassword)) - .build(); - } + final Settings updatedBindPasswordSettings = Settings.builder() + .setSecureSettings(secureSettings(PoolingSessionFactorySettings.SECURE_BIND_PASSWORD, newBindPassword)) + .build(); + ldap.reload(updatedBindPasswordSettings); authenticateUserAndAssertStatus(ldap, AuthenticationResult.Status.CONTINUE); // Change password on LDAP server side and check that authentication works changeUserPasswordOnLdapServers(BIND_DN, newBindPassword); authenticateUserAndAssertStatus(ldap, AuthenticationResult.Status.SUCCESS); - - if (useLegacyBindSetting) { - assertSettingDeprecationsAndWarnings( - new Setting[] { - PoolingSessionFactorySettings.LEGACY_BIND_PASSWORD.apply(REALM_IDENTIFIER.getType()) - .getConcreteSettingForNamespace(REALM_IDENTIFIER.getName()) } - ); - } } } public void testReloadWithConnectionPool() throws Exception { - 
final boolean useLegacyBindSetting = randomBoolean(); - final Settings bindPasswordSettings; - if (useLegacyBindSetting) { - bindPasswordSettings = Settings.builder() - .put(getFullSettingKey(REALM_IDENTIFIER, PoolingSessionFactorySettings.LEGACY_BIND_PASSWORD), INITIAL_BIND_PASSWORD) - .build(); - } else { - bindPasswordSettings = Settings.builder() - .setSecureSettings(secureSettings(PoolingSessionFactorySettings.SECURE_BIND_PASSWORD, INITIAL_BIND_PASSWORD)) - .build(); - } + final Settings bindPasswordSettings = Settings.builder() + .setSecureSettings(secureSettings(PoolingSessionFactorySettings.SECURE_BIND_PASSWORD, INITIAL_BIND_PASSWORD)) + .build(); final Settings settings = Settings.builder() .put(getFullSettingKey(REALM_IDENTIFIER.getName(), LdapUserSearchSessionFactorySettings.POOL_ENABLED), true) .putList(getFullSettingKey(REALM_IDENTIFIER, URLS_SETTING), ldapUrls()) @@ -194,16 +165,10 @@ public void testReloadWithConnectionPool() throws Exception { // Generate a new password and reload only on ES side final String newBindPassword = randomAlphaOfLengthBetween(5, 10); - final Settings updatedBindPasswordSettings; - if (useLegacyBindSetting) { - updatedBindPasswordSettings = Settings.builder() - .put(getFullSettingKey(REALM_IDENTIFIER, PoolingSessionFactorySettings.LEGACY_BIND_PASSWORD), newBindPassword) - .build(); - } else { - updatedBindPasswordSettings = Settings.builder() - .setSecureSettings(secureSettings(PoolingSessionFactorySettings.SECURE_BIND_PASSWORD, newBindPassword)) - .build(); - } + final Settings updatedBindPasswordSettings = Settings.builder() + .setSecureSettings(secureSettings(PoolingSessionFactorySettings.SECURE_BIND_PASSWORD, newBindPassword)) + .build(); + ldap.reload(updatedBindPasswordSettings); // Using new bind password should fail since we did not update it on LDAP server side. authenticateUserAndAssertStatus(ldap, AuthenticationResult.Status.CONTINUE); @@ -211,6 +176,68 @@ public void testReloadWithConnectionPool() throws Exception { // Change password on LDAP server side and check that authentication works now. 
             changeUserPasswordOnLdapServers(BIND_DN, newBindPassword);
             authenticateUserAndAssertStatus(ldap, AuthenticationResult.Status.SUCCESS);
+        }
+    }
+
+    public void testReloadValidation() throws Exception {
+        final boolean useLegacyBindSetting = randomBoolean();
+        final Settings bindPasswordSettings;
+        if (useLegacyBindSetting) {
+            bindPasswordSettings = Settings.builder()
+                .put(getFullSettingKey(REALM_IDENTIFIER, PoolingSessionFactorySettings.LEGACY_BIND_PASSWORD), INITIAL_BIND_PASSWORD)
+                .build();
+        } else {
+            bindPasswordSettings = Settings.builder()
+                .setSecureSettings(secureSettings(PoolingSessionFactorySettings.SECURE_BIND_PASSWORD, INITIAL_BIND_PASSWORD))
+                .build();
+        }
+        final Settings settings = Settings.builder()
+            .put(getFullSettingKey(REALM_IDENTIFIER.getName(), LdapUserSearchSessionFactorySettings.POOL_ENABLED), randomBoolean())
+            .putList(getFullSettingKey(REALM_IDENTIFIER, URLS_SETTING), ldapUrls())
+            .put(defaultRealmSettings)
+            .put(defaultGlobalSettings)
+            .put(bindPasswordSettings)
+            .build();
+        final RealmConfig config = getRealmConfig(REALM_IDENTIFIER, settings);
+        try (SessionFactory sessionFactory = LdapRealm.sessionFactory(config, new SSLService(config.env()), threadPool)) {
+            assertThat(sessionFactory, is(instanceOf(LdapUserSearchSessionFactory.class)));
+            LdapRealm ldap = new LdapRealm(config, sessionFactory, buildGroupAsRoleMapper(resourceWatcherService), threadPool);
+            ldap.initialize(Collections.singleton(ldap), licenseState);
+
+            var e = expectThrows(Exception.class, () -> ldap.reload(Settings.EMPTY));
+            assertThat(
+                e.getMessage(),
+                equalTo(
+                    "["
+                        + getFullSettingKey(config, PoolingSessionFactorySettings.BIND_DN)
+                        + "] is set but no bind password is specified. Without a corresponding bind password, "
+                        + "all "
+                        + ldap.type()
+                        + " realm authentication will fail. Specify a bind password via ["
+                        + getFullSettingKey(config, PoolingSessionFactorySettings.SECURE_BIND_PASSWORD)
+                        + "]."
+                )
+            );
+
+            e = expectThrows(
+                Exception.class,
+                () -> ldap.reload(
+                    Settings.builder()
+                        .setSecureSettings(secureSettings(PoolingSessionFactorySettings.SECURE_BIND_PASSWORD, INITIAL_BIND_PASSWORD))
+                        .put(getFullSettingKey(REALM_IDENTIFIER, PoolingSessionFactorySettings.LEGACY_BIND_PASSWORD), INITIAL_BIND_PASSWORD)
+                        .build()
+                )
+            );
+            assertThat(
+                e.getMessage(),
+                containsString(
+                    "You cannot specify both ["
+                        + getFullSettingKey(REALM_IDENTIFIER, PoolingSessionFactorySettings.LEGACY_BIND_PASSWORD)
+                        + "] and ["
+                        + getFullSettingKey(REALM_IDENTIFIER, PoolingSessionFactorySettings.SECURE_BIND_PASSWORD)
+                        + "]"
+                )
+            );
             if (useLegacyBindSetting) {
                 assertSettingDeprecationsAndWarnings(
@@ -219,6 +246,10 @@ public void testReloadWithConnectionPool() throws Exception {
                     .getConcreteSettingForNamespace(REALM_IDENTIFIER.getName()) }
             );
         }
+
+        // The already configured password should stay unchanged
+        // and the authentication should still work.
+        authenticateUserAndAssertStatus(ldap, AuthenticationResult.Status.SUCCESS);
     }
 }

From 01f19ecab295e961bb262fcc01391bcf12dd81d9 Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Wed, 7 Feb 2024 10:49:11 +0100
Subject: [PATCH 098/106] Simplify and optimize code around
 TermQueryBuilder.BinaryValues (#105220)

Without a change in behavior, we can remove the unused ListValues as well as
most of the allocations when serializing the values. This still leaves the
problem that the temporary buffer in `valueRef` could be massive in size;
that will be addressed in a short follow-up that this change sets up. 
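At its core, the new serialization path looks like this (a simplified sketch of the `BinaryValues#serialize` introduced in the diff below; the diff's version hoists the `convert` branch out of the loop):

    // Stream the terms directly into the output buffer in the same wire format
    // that StreamOutput#writeGenericValue(List) would produce, without first
    // materializing an intermediate List.
    private static BytesReference serialize(Collection<?> values, boolean convert) {
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            output.writeByte(StreamOutput.GENERIC_LIST_HEADER); // same header byte (7) as writeGenericList
            output.writeVInt(values.size());
            for (Object value : values) {
                // optionally normalize values to BytesRef, exactly as the old code did up front
                output.writeGenericValue(convert ? AbstractQueryBuilder.maybeConvertToBytesRef(value) : value);
            }
            return output.bytes();
        } catch (IOException e) {
            throw new UncheckedIOException("failed to serialize TermsQueryBuilder", e);
        }
    }

Because the header byte and the size prefix match what `writeGenericList` emits, readers deserialize the result exactly as they did the old list-based encoding.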
--- .../common/io/stream/StreamOutput.java | 4 +- .../index/query/TermsQueryBuilder.java | 207 ++++-------------- .../index/query/TermsQueryBuilderTests.java | 2 - 3 files changed, 49 insertions(+), 164 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index a3350c4526a91..b67879510b108 100644 --- a/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/server/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -787,8 +787,10 @@ public final void writeOptionalInstant(@Nullable Instant instant) throws IOExcep }) ); + public static final byte GENERIC_LIST_HEADER = (byte) 7; + public void writeGenericList(List v, Writer writer) throws IOException { - writeByte((byte) 7); + writeByte(GENERIC_LIST_HEADER); writeCollection(v, writer); } diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java index 6e8a20b1ad290..cdf55ab187dc6 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/TermsQueryBuilder.java @@ -41,7 +41,6 @@ import java.util.Iterator; import java.util.List; import java.util.Objects; -import java.util.function.IntFunction; import java.util.function.Supplier; import java.util.stream.IntStream; @@ -50,10 +49,9 @@ */ public class TermsQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "terms"; - private static final TransportVersion VERSION_STORE_VALUES_AS_BYTES_REFERENCE = TransportVersions.V_7_12_0; private final String fieldName; - private final Values values; + private final BinaryValues values; private final TermsLookup termsLookup; private final Supplier> supplier; @@ -147,7 +145,7 @@ public TermsQueryBuilder(String fieldName, Object... values) { * @param fieldName The field name * @param values The terms */ - public TermsQueryBuilder(String fieldName, Iterable values) { + public TermsQueryBuilder(String fieldName, Collection values) { if (Strings.isEmpty(fieldName)) { throw new IllegalArgumentException("field name cannot be null."); } @@ -155,8 +153,8 @@ public TermsQueryBuilder(String fieldName, Iterable values) { throw new IllegalArgumentException("No value specified for terms query"); } this.fieldName = fieldName; - if (values instanceof Values) { - this.values = (Values) values; + if (values instanceof BinaryValues binaryValues) { + this.values = binaryValues; } else { this.values = new BinaryValues(values, true); } @@ -178,7 +176,7 @@ public TermsQueryBuilder(StreamInput in) throws IOException { super(in); this.fieldName = in.readString(); this.termsLookup = in.readOptionalWriteable(TermsLookup::new); - this.values = Values.readFrom(in); + this.values = in.readOptionalWriteable(BinaryValues::new); this.supplier = null; } @@ -189,14 +187,14 @@ protected void doWriteTo(StreamOutput out) throws IOException { } out.writeString(fieldName); out.writeOptionalWriteable(termsLookup); - Values.writeTo(out, values); + out.writeOptionalWriteable(values); } public String fieldName() { return this.fieldName; } - public Values getValues() { + public BinaryValues getValues() { return values; } @@ -412,116 +410,83 @@ protected QueryBuilder doIndexMetadataRewrite(QueryRewriteContext context) throw return this; } + /** + * Store terms as a {@link BytesReference}. + *

+ * When users send a query contain a lot of terms, A {@link BytesReference} can help + * gc and reduce the cost of {@link #doWriteTo}, which can be slow for lots of terms. + */ @SuppressWarnings("rawtypes") - private abstract static class Values extends AbstractCollection implements Writeable { + public static final class BinaryValues extends AbstractCollection implements Writeable { - private static Values readFrom(StreamInput in) throws IOException { - if (in.getTransportVersion().onOrAfter(VERSION_STORE_VALUES_AS_BYTES_REFERENCE)) { - return in.readOptionalWriteable(BinaryValues::new); - } else { - List list = (List) in.readGenericValue(); - return list == null ? null : new ListValues(list); - } + private final BytesReference valueRef; + private final int size; + + private BinaryValues(StreamInput in) throws IOException { + this(in.readBytesReference()); } - private static void writeTo(StreamOutput out, Values values) throws IOException { - if (out.getTransportVersion().onOrAfter(VERSION_STORE_VALUES_AS_BYTES_REFERENCE)) { - out.writeOptionalWriteable(values); - } else { - if (values == null) { - out.writeGenericValue(null); - } else { - values.writeTo(out); - } - } + private BinaryValues(Collection values, boolean convert) { + this(serialize(values, convert)); } - protected static BytesReference serialize(Iterable values, boolean convert) { - List list; - if (values instanceof List) { - list = (List) values; - } else { - ArrayList arrayList = new ArrayList<>(); - for (Object o : values) { - arrayList.add(o); - } - list = arrayList; - } + private static BytesReference serialize(Collection values, boolean convert) { try (BytesStreamOutput output = new BytesStreamOutput()) { + output.writeByte(StreamOutput.GENERIC_LIST_HEADER); + output.writeVInt(values.size()); if (convert) { - list = list.stream().map(AbstractQueryBuilder::maybeConvertToBytesRef).toList(); + for (Object value : values) { + output.writeGenericValue(AbstractQueryBuilder.maybeConvertToBytesRef(value)); + } + } else { + for (Object value : values) { + output.writeGenericValue(value); + } } - output.writeGenericValue(list); return output.bytes(); } catch (IOException e) { throw new UncheckedIOException("failed to serialize TermsQueryBuilder", e); } } - @Override - public final boolean add(Object o) { - throw new UnsupportedOperationException(); + private BinaryValues(BytesReference bytesRef) { + this.valueRef = bytesRef; + try (StreamInput in = valueRef.streamInput()) { + size = consumerHeadersAndGetListSize(in); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } @Override - public final boolean remove(Object o) { + public boolean remove(Object o) { throw new UnsupportedOperationException(); } @Override - public final boolean containsAll(Collection c) { + public boolean containsAll(Collection c) { throw new UnsupportedOperationException(); } @Override - public final boolean addAll(Collection c) { + public boolean addAll(Collection c) { throw new UnsupportedOperationException(); } @Override - public final boolean removeAll(Collection c) { + public boolean removeAll(Collection c) { throw new UnsupportedOperationException(); } @Override - public final boolean retainAll(Collection c) { + public boolean retainAll(Collection c) { throw new UnsupportedOperationException(); } @Override - public final void clear() { + public void clear() { throw new UnsupportedOperationException(); } - } - - /** - * Store terms as a {@link BytesReference}. - *

- * When users send a query contain a lot of terms, A {@link BytesReference} can help - * gc and reduce the cost of {@link #doWriteTo}, which can be slow for lots of terms. - */ - @SuppressWarnings("rawtypes") - private static class BinaryValues extends Values { - - private final BytesReference valueRef; - private final int size; - - private BinaryValues(StreamInput in) throws IOException { - this(in.readBytesReference()); - } - - private BinaryValues(Iterable values, boolean convert) { - this(serialize(values, convert)); - } - - private BinaryValues(BytesReference bytesRef) { - this.valueRef = bytesRef; - try (StreamInput in = valueRef.streamInput()) { - size = consumerHeadersAndGetListSize(in); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } @Override public int size() { @@ -562,11 +527,7 @@ public Object next() { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(VERSION_STORE_VALUES_AS_BYTES_REFERENCE)) { - out.writeBytesReference(valueRef); - } else { - valueRef.writeTo(out); - } + out.writeBytesReference(valueRef); } @Override @@ -584,87 +545,11 @@ public int hashCode() { private static int consumerHeadersAndGetListSize(StreamInput in) throws IOException { byte genericSign = in.readByte(); - assert genericSign == 7; + assert genericSign == StreamOutput.GENERIC_LIST_HEADER; return in.readVInt(); } } - /** - * This is for lower version requests compatible. - *

- * If we do not keep this, it could be expensive when receiving a request from - * lower version. - * We have to read the value list by {@link StreamInput#readGenericValue}, - * serialize it into {@link BytesReference}, and then deserialize it again when - * {@link #doToQuery} called}. - *

- * - * TODO: remove in 9.0.0 - */ - @SuppressWarnings("rawtypes") - private static class ListValues extends Values { - - private final List values; - - private ListValues(List values) throws IOException { - this.values = values; - } - - @Override - public int size() { - return values.size(); - } - - @Override - public boolean contains(Object o) { - return values.contains(o); - } - - @Override - public Iterator iterator() { - return values.iterator(); - } - - @Override - public Object[] toArray() { - return values.toArray(); - } - - @Override - public Object[] toArray(Object[] a) { - return values.toArray(a); - } - - @Override - @SuppressWarnings("unchecked") - public Object[] toArray(IntFunction generator) { - return values.toArray(generator); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(VERSION_STORE_VALUES_AS_BYTES_REFERENCE)) { - BytesReference bytesRef = serialize(values, false); - out.writeBytesReference(bytesRef); - } else { - out.writeGenericValue(values); - } - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ListValues that = (ListValues) o; - return Objects.equals(values, that.values); - } - - @Override - public int hashCode() { - return Objects.hash(values); - } - } - @Override public TransportVersion getMinimalSupportedVersion() { return TransportVersions.ZERO; diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java index 6d43276c7bd20..98096a49443a9 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsQueryBuilderTests.java @@ -165,8 +165,6 @@ public void testNullValues() { assertThat(e.getMessage(), containsString("No value specified for terms query")); e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (Object[]) null)); assertThat(e.getMessage(), containsString("No value specified for terms query")); - e = expectThrows(IllegalArgumentException.class, () -> new TermsQueryBuilder("field", (Iterable) null)); - assertThat(e.getMessage(), containsString("No value specified for terms query")); } public void testBothValuesAndLookupSet() throws IOException { From 011876367ad6d7c4eeebe517b2f4693ccf477a23 Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Wed, 7 Feb 2024 13:01:01 +0200 Subject: [PATCH 099/106] Execute lazy rollover with an internal dedicated user #104732 (#104905) The unconditional rollover that is a consequence of a lazy rollover command is triggered by the creation of a document. In many cases, the user triggering this rollover won't have sufficient privileges to ensure the successful execution of this rollover. For this reason, we introduce a dedicated rollover action and a dedicated internal user to cover this case and enable this functionality. 
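For reference, the flow this fixes looks as follows (condensed from the integration test below; `adminClient()` runs as a superuser and `client()` as the low-privilege `test_simple_user`):

    // The privileged caller only marks the data stream for rollover on the next write ...
    adminClient().performRequest(new Request("POST", "/lazy-ds/_rollover?lazy"));
    // ... and a plain write by a less-privileged user then triggers the actual rollover,
    // which is now executed as a dedicated internal user instead of as the writer.
    Request createDoc = new Request("POST", "/lazy-ds/_doc?refresh=true");
    createDoc.setJsonEntity("{ \"@timestamp\": \"2020-10-22\", \"a\": 1 }");
    assertOK(client().performRequest(createDoc));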
--- docs/changelog/104905.yaml | 6 + .../datastreams/LazyRolloverDataStreamIT.java | 142 ++++++++++++------ .../src/javaRestTest/resources/roles.yml | 2 +- .../datastreams/DataStreamFeatures.java | 4 +- .../elasticsearch/action/ActionModule.java | 2 + .../indices/rollover/LazyRolloverAction.java | 129 ++++++++++++++++ .../rollover/TransportRolloverAction.java | 35 ++++- .../action/bulk/TransportBulkAction.java | 35 +++-- .../bulk/TransportSimulateBulkAction.java | 3 + ...ActionIndicesThatCannotBeCreatedTests.java | 8 +- .../bulk/TransportBulkActionIngestTests.java | 8 +- .../action/bulk/TransportBulkActionTests.java | 11 +- .../bulk/TransportBulkActionTookTests.java | 1 + .../TransportSimulateBulkActionTests.java | 2 + .../snapshots/SnapshotResiliencyTests.java | 5 + .../core/security/user/InternalUsers.java | 26 +++- .../core/security/user/UsernamesField.java | 2 + .../xpack/security/operator/Constants.java | 1 + .../security/authz/AuthorizationUtils.java | 4 + 19 files changed, 358 insertions(+), 68 deletions(-) create mode 100644 docs/changelog/104905.yaml create mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java diff --git a/docs/changelog/104905.yaml b/docs/changelog/104905.yaml new file mode 100644 index 0000000000000..80e06dc3b0cf5 --- /dev/null +++ b/docs/changelog/104905.yaml @@ -0,0 +1,6 @@ +pr: 104905 +summary: "Execute lazy rollover with an internal dedicated user #104732" +area: Data streams +type: bug +issues: + - 104732 diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LazyRolloverDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LazyRolloverDataStreamIT.java index d89dbc346c7e0..978dffb965ac8 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LazyRolloverDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LazyRolloverDataStreamIT.java @@ -11,7 +11,18 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; - +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.FeatureFlag; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.Before; +import org.junit.ClassRule; + +import java.io.IOException; import java.util.List; import java.util.Map; @@ -21,10 +32,51 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.startsWith; -public class LazyRolloverDataStreamIT extends DisabledSecurityDataStreamTestCase { +public class LazyRolloverDataStreamIT extends ESRestTestCase { + + private static final String PASSWORD = "secret-test-password"; + private static final String DATA_STREAM_NAME = "lazy-ds"; + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .feature(FeatureFlag.FAILURE_STORE_ENABLED) + .setting("xpack.watcher.enabled", "false") + .setting("xpack.ml.enabled", "false") + .setting("xpack.security.enabled", "true") + .setting("xpack.security.transport.ssl.enabled", "false") + .setting("xpack.security.http.ssl.enabled", 
"false") + .user("test_admin", PASSWORD, "superuser", false) + .user("test_simple_user", PASSWORD, "not_privileged", false) + .rolesFile(Resource.fromClasspath("roles.yml")) + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } - @SuppressWarnings("unchecked") - public void testLazyRollover() throws Exception { + @Override + protected Settings restClientSettings() { + // If this test is running in a test framework that handles its own authorization, we don't want to overwrite it. + if (super.restClientSettings().keySet().contains(ThreadContext.PREFIX + ".Authorization")) { + return super.restClientSettings(); + } else { + // Note: This user is assigned the role "manage_data_stream_lifecycle". That role is defined in roles.yml. + String token = basicAuthHeaderValue("test_simple_user", new SecureString(PASSWORD.toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + } + + @Override + protected Settings restAdminSettings() { + String authKey = ThreadContext.PREFIX + ".Authorization"; + String token = basicAuthHeaderValue("test_admin", new SecureString(PASSWORD.toCharArray())); + return Settings.builder().put(authKey, token).build(); + } + + @Before + public void setUpDataStreamAsAdmin() throws IOException { Request putComposableIndexTemplateRequest = new Request("POST", "/_index_template/lazy-ds-template"); putComposableIndexTemplateRequest.setJsonEntity(""" { @@ -32,15 +84,28 @@ public void testLazyRollover() throws Exception { "data_stream": {} } """); - assertOK(client().performRequest(putComposableIndexTemplateRequest)); - - String dataStreamName = "lazy-ds"; + assertOK(adminClient().performRequest(putComposableIndexTemplateRequest)); + assertOK(adminClient().performRequest(new Request("PUT", "/_data_stream/" + DATA_STREAM_NAME))); + } - Request createDocRequest = new Request("POST", "/" + dataStreamName + "/_doc?refresh=true"); + @SuppressWarnings("unchecked") + public void testLazyRollover() throws Exception { + Request createDocRequest = new Request("POST", "/" + DATA_STREAM_NAME + "/_doc?refresh=true"); createDocRequest.setJsonEntity("{ \"@timestamp\": \"2020-10-22\", \"a\": 1 }"); assertOK(client().performRequest(createDocRequest)); - final Response rolloverResponse = client().performRequest(new Request("POST", "/" + dataStreamName + "/_rollover?lazy")); + { + ResponseException responseError = expectThrows( + ResponseException.class, + () -> client().performRequest(new Request("POST", "/" + DATA_STREAM_NAME + "/_rollover?lazy")) + ); + assertThat(responseError.getResponse().getStatusLine().getStatusCode(), is(403)); + assertThat( + responseError.getMessage(), + containsString("action [indices:admin/rollover] is unauthorized for user [test_simple_user]") + ); + } + final Response rolloverResponse = adminClient().performRequest(new Request("POST", "/" + DATA_STREAM_NAME + "/_rollover?lazy")); Map rolloverResponseMap = entityAsMap(rolloverResponse); assertThat((String) rolloverResponseMap.get("old_index"), startsWith(".ds-lazy-ds-")); assertThat((String) rolloverResponseMap.get("old_index"), endsWith("-000001")); @@ -53,25 +118,25 @@ public void testLazyRollover() throws Exception { assertThat(rolloverResponseMap.get("conditions"), equalTo(Map.of())); { - final Response dataStreamResponse = client().performRequest(new Request("GET", "/_data_stream/" + dataStreamName)); + final Response dataStreamResponse = adminClient().performRequest(new Request("GET", "/_data_stream/" + 
DATA_STREAM_NAME)); List dataStreams = (List) entityAsMap(dataStreamResponse).get("data_streams"); assertThat(dataStreams.size(), is(1)); Map dataStream = (Map) dataStreams.get(0); - assertThat(dataStream.get("name"), equalTo(dataStreamName)); + assertThat(dataStream.get("name"), equalTo(DATA_STREAM_NAME)); assertThat(dataStream.get("rollover_on_write"), is(true)); assertThat(((List) dataStream.get("indices")).size(), is(1)); } - createDocRequest = new Request("POST", "/" + dataStreamName + "/_doc?refresh=true"); + createDocRequest = new Request("POST", "/" + DATA_STREAM_NAME + "/_doc?refresh=true"); createDocRequest.setJsonEntity("{ \"@timestamp\": \"2020-10-23\", \"a\": 2 }"); assertOK(client().performRequest(createDocRequest)); { - final Response dataStreamResponse = client().performRequest(new Request("GET", "/_data_stream/" + dataStreamName)); + final Response dataStreamResponse = client().performRequest(new Request("GET", "/_data_stream/" + DATA_STREAM_NAME)); List dataStreams = (List) entityAsMap(dataStreamResponse).get("data_streams"); assertThat(dataStreams.size(), is(1)); Map dataStream = (Map) dataStreams.get(0); - assertThat(dataStream.get("name"), equalTo(dataStreamName)); + assertThat(dataStream.get("name"), equalTo(DATA_STREAM_NAME)); assertThat(dataStream.get("rollover_on_write"), is(false)); assertThat(((List) dataStream.get("indices")).size(), is(2)); } @@ -79,18 +144,7 @@ public void testLazyRollover() throws Exception { @SuppressWarnings("unchecked") public void testLazyRolloverFailsIndexing() throws Exception { - Request putComposableIndexTemplateRequest = new Request("POST", "/_index_template/lazy-ds-template"); - putComposableIndexTemplateRequest.setJsonEntity(""" - { - "index_patterns": ["lazy-ds*"], - "data_stream": {} - } - """); - assertOK(client().performRequest(putComposableIndexTemplateRequest)); - - String dataStreamName = "lazy-ds"; - - Request createDocRequest = new Request("POST", "/" + dataStreamName + "/_doc?refresh=true"); + Request createDocRequest = new Request("POST", "/" + DATA_STREAM_NAME + "/_doc?refresh=true"); createDocRequest.setJsonEntity("{ \"@timestamp\": \"2020-10-22\", \"a\": 1 }"); assertOK(client().performRequest(createDocRequest)); @@ -101,9 +155,9 @@ public void testLazyRolloverFailsIndexing() throws Exception { "cluster.max_shards_per_node": 1 } }"""); - assertAcknowledged(client().performRequest(updateClusterSettingsRequest)); + assertAcknowledged(adminClient().performRequest(updateClusterSettingsRequest)); - final Response rolloverResponse = client().performRequest(new Request("POST", "/" + dataStreamName + "/_rollover?lazy")); + final Response rolloverResponse = adminClient().performRequest(new Request("POST", "/" + DATA_STREAM_NAME + "/_rollover?lazy")); Map rolloverResponseMap = entityAsMap(rolloverResponse); assertThat((String) rolloverResponseMap.get("old_index"), startsWith(".ds-lazy-ds-")); assertThat((String) rolloverResponseMap.get("old_index"), endsWith("-000001")); @@ -116,17 +170,17 @@ public void testLazyRolloverFailsIndexing() throws Exception { assertThat(rolloverResponseMap.get("conditions"), equalTo(Map.of())); { - final Response dataStreamResponse = client().performRequest(new Request("GET", "/_data_stream/" + dataStreamName)); + final Response dataStreamResponse = client().performRequest(new Request("GET", "/_data_stream/" + DATA_STREAM_NAME)); List dataStreams = (List) entityAsMap(dataStreamResponse).get("data_streams"); assertThat(dataStreams.size(), is(1)); Map dataStream = (Map) dataStreams.get(0); - 
assertThat(dataStream.get("name"), equalTo(dataStreamName)); + assertThat(dataStream.get("name"), equalTo(DATA_STREAM_NAME)); assertThat(dataStream.get("rollover_on_write"), is(true)); assertThat(((List) dataStream.get("indices")).size(), is(1)); } try { - createDocRequest = new Request("POST", "/" + dataStreamName + "/_doc?refresh=true"); + createDocRequest = new Request("POST", "/" + DATA_STREAM_NAME + "/_doc?refresh=true"); createDocRequest.setJsonEntity("{ \"@timestamp\": \"2020-10-23\", \"a\": 2 }"); client().performRequest(createDocRequest); fail("Indexing should have failed."); @@ -141,16 +195,16 @@ public void testLazyRolloverFailsIndexing() throws Exception { "cluster.max_shards_per_node": null } }"""); - assertAcknowledged(client().performRequest(updateClusterSettingsRequest)); - createDocRequest = new Request("POST", "/" + dataStreamName + "/_doc?refresh=true"); + assertAcknowledged(adminClient().performRequest(updateClusterSettingsRequest)); + createDocRequest = new Request("POST", "/" + DATA_STREAM_NAME + "/_doc?refresh=true"); createDocRequest.setJsonEntity("{ \"@timestamp\": \"2020-10-23\", \"a\": 2 }"); assertOK(client().performRequest(createDocRequest)); { - final Response dataStreamResponse = client().performRequest(new Request("GET", "/_data_stream/" + dataStreamName)); + final Response dataStreamResponse = client().performRequest(new Request("GET", "/_data_stream/" + DATA_STREAM_NAME)); List dataStreams = (List) entityAsMap(dataStreamResponse).get("data_streams"); assertThat(dataStreams.size(), is(1)); Map dataStream = (Map) dataStreams.get(0); - assertThat(dataStream.get("name"), equalTo(dataStreamName)); + assertThat(dataStream.get("name"), equalTo(DATA_STREAM_NAME)); assertThat(dataStream.get("rollover_on_write"), is(false)); assertThat(((List) dataStream.get("indices")).size(), is(2)); } @@ -158,25 +212,13 @@ public void testLazyRolloverFailsIndexing() throws Exception { @SuppressWarnings("unchecked") public void testLazyRolloverWithConditions() throws Exception { - Request putComposableIndexTemplateRequest = new Request("POST", "/_index_template/lazy-ds-template"); - putComposableIndexTemplateRequest.setJsonEntity(""" - { - "index_patterns": ["lazy-ds*"], - "data_stream": {} - } - """); - assertOK(client().performRequest(putComposableIndexTemplateRequest)); - - String dataStreamName = "lazy-ds"; - - Request createDocRequest = new Request("POST", "/" + dataStreamName + "/_doc?refresh=true"); + Request createDocRequest = new Request("POST", "/" + DATA_STREAM_NAME + "/_doc?refresh=true"); createDocRequest.setJsonEntity("{ \"@timestamp\": \"2020-10-22\", \"a\": 1 }"); - assertOK(client().performRequest(createDocRequest)); - Request rolloverRequest = new Request("POST", "/" + dataStreamName + "/_rollover?lazy"); + Request rolloverRequest = new Request("POST", "/" + DATA_STREAM_NAME + "/_rollover?lazy"); rolloverRequest.setJsonEntity("{\"conditions\": {\"max_docs\": 1}}"); - ResponseException responseError = expectThrows(ResponseException.class, () -> client().performRequest(rolloverRequest)); + ResponseException responseError = expectThrows(ResponseException.class, () -> adminClient().performRequest(rolloverRequest)); assertThat(responseError.getResponse().getStatusLine().getStatusCode(), is(400)); assertThat(responseError.getMessage(), containsString("only without any conditions")); } diff --git a/modules/data-streams/src/javaRestTest/resources/roles.yml b/modules/data-streams/src/javaRestTest/resources/roles.yml index aa23be3821b95..4c1350e4b058d 100644 --- 
a/modules/data-streams/src/javaRestTest/resources/roles.yml +++ b/modules/data-streams/src/javaRestTest/resources/roles.yml @@ -11,7 +11,7 @@ not_privileged: cluster: - monitor indices: - - names: [ 'data-stream-lifecycle-*' ] + - names: [ 'data-stream-lifecycle-*', 'lazy-ds*' ] privileges: - read - write diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java index 53b3ca3353bab..734c10570ab2b 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamFeatures.java @@ -8,6 +8,7 @@ package org.elasticsearch.datastreams; +import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; import org.elasticsearch.datastreams.lifecycle.health.DataStreamLifecycleHealthInfoPublisher; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; @@ -22,7 +23,8 @@ public class DataStreamFeatures implements FeatureSpecification { @Override public Set getFeatures() { return Set.of( - DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE // Added in 8.12 + DataStreamLifecycleHealthInfoPublisher.DSL_HEALTH_INFO_FEATURE, // Added in 8.12 + LazyRolloverAction.DATA_STREAM_LAZY_ROLLOVER // Added in 8.13 ); } } diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 6ee26e8fb7e4e..fec1f98324c87 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -136,6 +136,7 @@ import org.elasticsearch.action.admin.indices.refresh.TransportRefreshAction; import org.elasticsearch.action.admin.indices.refresh.TransportShardRefreshAction; import org.elasticsearch.action.admin.indices.resolve.ResolveIndexAction; +import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; import org.elasticsearch.action.admin.indices.rollover.RolloverAction; import org.elasticsearch.action.admin.indices.rollover.TransportRolloverAction; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentsAction; @@ -676,6 +677,7 @@ public void reg actions.register(CreateIndexAction.INSTANCE, TransportCreateIndexAction.class); actions.register(ResizeAction.INSTANCE, TransportResizeAction.class); actions.register(RolloverAction.INSTANCE, TransportRolloverAction.class); + actions.register(LazyRolloverAction.INSTANCE, LazyRolloverAction.TransportLazyRolloverAction.class); actions.register(TransportDeleteIndexAction.TYPE, TransportDeleteIndexAction.class); actions.register(GetIndexAction.INSTANCE, TransportGetIndexAction.class); actions.register(OpenIndexAction.INSTANCE, TransportOpenIndexAction.class); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java new file mode 100644 index 0000000000000..9266a320f598c --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/LazyRolloverAction.java @@ -0,0 +1,129 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ +package org.elasticsearch.action.admin.indices.rollover; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.MetadataDataStreamsService; +import org.elasticsearch.cluster.routing.allocation.AllocationService; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.util.Map; + +/** + * API that lazily rolls over a data stream that has the flag {@link DataStream#rolloverOnWrite()} enabled. These requests always + * originate from requests that write into the data stream. + */ +public final class LazyRolloverAction extends ActionType { + + public static final NodeFeature DATA_STREAM_LAZY_ROLLOVER = new NodeFeature("data_stream.rollover.lazy"); + + public static final LazyRolloverAction INSTANCE = new LazyRolloverAction(); + public static final String NAME = "indices:admin/data_stream/lazy_rollover"; + + private LazyRolloverAction() { + super(NAME); + } + + @Override + public String name() { + return NAME; + } + + public static final class TransportLazyRolloverAction extends TransportRolloverAction { + + @Inject + public TransportLazyRolloverAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + MetadataRolloverService rolloverService, + AllocationService allocationService, + MetadataDataStreamsService metadataDataStreamsService, + Client client + ) { + super( + LazyRolloverAction.INSTANCE, + transportService, + clusterService, + threadPool, + actionFilters, + indexNameExpressionResolver, + rolloverService, + client, + allocationService, + metadataDataStreamsService + ); + } + + @Override + protected void masterOperation( + Task task, + RolloverRequest rolloverRequest, + ClusterState clusterState, + ActionListener listener + ) throws Exception { + assert task instanceof CancellableTask; + + assert rolloverRequest.getConditions().hasConditions() == false + && rolloverRequest.isDryRun() == false + && rolloverRequest.isLazy() == false + : "The auto rollover action does not expect any other parameters in the request apart from the data stream name"; + + Metadata metadata = clusterState.metadata(); + // We evaluate the names of the source index as well as what our newly created index would be. 
+ final MetadataRolloverService.NameResolution trialRolloverNames = MetadataRolloverService.resolveRolloverNames( + clusterState, + rolloverRequest.getRolloverTarget(), + rolloverRequest.getNewIndexName(), + rolloverRequest.getCreateIndexRequest() + ); + final String trialSourceIndexName = trialRolloverNames.sourceName(); + final String trialRolloverIndexName = trialRolloverNames.rolloverName(); + MetadataRolloverService.validateIndexName(clusterState, trialRolloverIndexName); + + assert metadata.dataStreams().containsKey(rolloverRequest.getRolloverTarget()) : "Auto-rollover applies only to data streams"; + + final RolloverResponse trialRolloverResponse = new RolloverResponse( + trialSourceIndexName, + trialRolloverIndexName, + Map.of(), + false, + false, + false, + false, + false + ); + + String source = "lazy_rollover source [" + trialSourceIndexName + "] to target [" + trialRolloverIndexName + "]"; + // We create a new rollover request to ensure that it doesn't contain any other parameters apart from the data stream name + // This will provide a more resilient user experience + RolloverTask rolloverTask = new RolloverTask( + new RolloverRequest(rolloverRequest.getRolloverTarget(), null), + null, + trialRolloverResponse, + listener + ); + submitRolloverTask(rolloverRequest, source, rolloverTask); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index 4bf4ee975b107..481eda825b047 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; @@ -81,9 +82,35 @@ public TransportRolloverAction( Client client, AllocationService allocationService, MetadataDataStreamsService metadataDataStreamsService + ) { + this( + RolloverAction.INSTANCE, + transportService, + clusterService, + threadPool, + actionFilters, + indexNameExpressionResolver, + rolloverService, + client, + allocationService, + metadataDataStreamsService + ); + } + + TransportRolloverAction( + ActionType actionType, + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + MetadataRolloverService rolloverService, + Client client, + AllocationService allocationService, + MetadataDataStreamsService metadataDataStreamsService ) { super( - RolloverAction.NAME, + actionType.name(), transportService, clusterService, threadPool, @@ -221,7 +248,7 @@ protected void masterOperation( if (rolloverRequest.areConditionsMet(trialConditionResults)) { String source = "rollover_index source [" + trialRolloverIndexName + "] to target [" + trialRolloverIndexName + "]"; RolloverTask rolloverTask = new RolloverTask(rolloverRequest, statsResponse, trialRolloverResponse, delegate); - rolloverTaskQueue.submitTask(source, rolloverTask, rolloverRequest.masterNodeTimeout()); + submitRolloverTask(rolloverRequest,
source, rolloverTask); } else { // conditions not met delegate.onResponse(trialRolloverResponse); @@ -230,6 +257,10 @@ protected void masterOperation( ); } + void submitRolloverTask(RolloverRequest rolloverRequest, String source, RolloverTask rolloverTask) { + rolloverTaskQueue.submitTask(source, rolloverTask, rolloverRequest.masterNodeTimeout()); + } + static Map evaluateConditions(final Collection> conditions, @Nullable final Condition.Stats stats) { Objects.requireNonNull(conditions, "conditions must not be null"); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 2f12008501487..32566b559410d 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -20,7 +20,7 @@ import org.elasticsearch.action.admin.indices.create.AutoCreateAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.rollover.RolloverAction; +import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverResponse; import org.elasticsearch.action.index.IndexRequest; @@ -31,6 +31,7 @@ import org.elasticsearch.action.support.WriteResponse; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; @@ -50,6 +51,7 @@ import org.elasticsearch.core.Assertions; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexingPressure; @@ -82,17 +84,20 @@ public class TransportBulkAction extends HandledTransportAction { private static final Logger logger = LogManager.getLogger(TransportBulkAction.class); + public static final String LAZY_ROLLOVER_ORIGIN = "lazy_rollover"; private final ActionType bulkAction; private final ThreadPool threadPool; private final ClusterService clusterService; private final IngestService ingestService; + private final FeatureService featureService; private final LongSupplier relativeTimeProvider; private final IngestActionForwarder ingestForwarder; private final NodeClient client; private final IndexNameExpressionResolver indexNameExpressionResolver; private final IndexingPressure indexingPressure; private final SystemIndices systemIndices; + private final OriginSettingClient rolloverClient; @Inject public TransportBulkAction( @@ -100,6 +105,7 @@ public TransportBulkAction( TransportService transportService, ClusterService clusterService, IngestService ingestService, + FeatureService featureService, NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, @@ -111,6 +117,7 @@ public TransportBulkAction( transportService, clusterService, ingestService, + featureService, client, actionFilters, indexNameExpressionResolver, @@ -125,6 +132,7 @@ public 
TransportBulkAction( TransportService transportService, ClusterService clusterService, IngestService ingestService, + FeatureService featureService, NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, @@ -139,6 +147,7 @@ public TransportBulkAction( transportService, clusterService, ingestService, + featureService, client, actionFilters, indexNameExpressionResolver, @@ -155,6 +164,7 @@ public TransportBulkAction( TransportService transportService, ClusterService clusterService, IngestService ingestService, + FeatureService featureService, NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, @@ -168,6 +178,7 @@ public TransportBulkAction( this.threadPool = threadPool; this.clusterService = clusterService; this.ingestService = ingestService; + this.featureService = featureService; this.relativeTimeProvider = relativeTimeProvider; this.ingestForwarder = new IngestActionForwarder(transportService); this.client = client; @@ -175,6 +186,7 @@ public TransportBulkAction( this.indexingPressure = indexingPressure; this.systemIndices = systemIndices; clusterService.addStateApplier(this.ingestForwarder); + this.rolloverClient = new OriginSettingClient(client, LAZY_ROLLOVER_ORIGIN); } /** @@ -357,10 +369,12 @@ protected void doInternalExecute(Task task, BulkRequest bulkRequest, String exec .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().isRequireDataStream)); // Step 3: Collect all the data streams that need to be rolled over before writing - Set dataStreamsToBeRolledOver = indices.keySet().stream().filter(target -> { - DataStream dataStream = state.metadata().dataStreams().get(target); - return dataStream != null && dataStream.rolloverOnWrite(); - }).collect(Collectors.toSet()); + Set dataStreamsToBeRolledOver = featureService.clusterHasFeature(state, LazyRolloverAction.DATA_STREAM_LAZY_ROLLOVER) + ? indices.keySet().stream().filter(target -> { + DataStream dataStream = state.metadata().dataStreams().get(target); + return dataStream != null && dataStream.rolloverOnWrite(); + }).collect(Collectors.toSet()) + : Set.of(); // Step 4: create all the indices that are missing, if there are any missing. start the bulk after all the creates come back. createMissingIndicesAndIndexData( @@ -423,7 +437,7 @@ public void onFailure(Exception e) { }, refs.acquire())); } for (String dataStream : dataStreamsToBeRolledOver) { - rolloverDataStream(dataStream, bulkRequest.timeout(), ActionListener.releaseAfter(new ActionListener<>() { + lazyRolloverDataStream(dataStream, bulkRequest.timeout(), ActionListener.releaseAfter(new ActionListener<>() { @Override public void onResponse(RolloverResponse result) { @@ -432,8 +446,7 @@ public void onResponse(RolloverResponse result) { // - A request had conditions that were not met // Since none of the above apply, getting a response with rolled_over false is considered a bug // that should be caught here and inform the developer. 
- assert result.isRolledOver() : "An successful unconditional rollover should always result in a rolled over data stream"; + assert result.isRolledOver() : "A successful lazy rollover should always result in a rolled over data stream"; } @Override @@ -561,10 +574,12 @@ void createIndex(String index, boolean requireDataStream, TimeValue timeout, Act client.execute(AutoCreateAction.INSTANCE, createIndexRequest, listener); } - void rolloverDataStream(String dataStream, TimeValue timeout, ActionListener listener) { + void lazyRolloverDataStream(String dataStream, TimeValue timeout, ActionListener listener) { RolloverRequest rolloverRequest = new RolloverRequest(dataStream, null); rolloverRequest.masterNodeTimeout(timeout); - client.execute(RolloverAction.INSTANCE, rolloverRequest, listener); + // We are executing a lazy rollover because it is an action specialised for this situation, when we want an + // unconditional and performant rollover. + rolloverClient.execute(LazyRolloverAction.INSTANCE, rolloverRequest, listener); } private static boolean setResponseFailureIfIndexMatches( diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java index e77d4ab9e0b85..f65d0f462fde6 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportSimulateBulkAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.indices.SystemIndices; @@ -37,6 +38,7 @@ public TransportSimulateBulkAction( TransportService transportService, ClusterService clusterService, IngestService ingestService, + FeatureService featureService, NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, @@ -50,6 +52,7 @@ public TransportSimulateBulkAction( transportService, clusterService, ingestService, + featureService, client, actionFilters, indexNameExpressionResolver, diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java index 70be3207486ec..3057b00553a22 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIndicesThatCannotBeCreatedTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -25,6 +26,7 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.index.VersionType; @@ -41,6 +43,7 @@ import java.util.function.Function; import static java.util.Collections.emptySet; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -114,12 +117,15 @@ public boolean hasIndexAbstraction(String indexAbstraction, ClusterState state) final ThreadPool threadPool = mock(ThreadPool.class); TransportService transportService = MockUtils.setupTransportServiceWithThreadpoolExecutor(threadPool); + FeatureService mockFeatureService = mock(FeatureService.class); + when(mockFeatureService.clusterHasFeature(any(), any())).thenReturn(true); TransportBulkAction action = new TransportBulkAction( threadPool, transportService, clusterService, null, - null, + mockFeatureService, + new NodeClient(Settings.EMPTY, threadPool), mock(ActionFilters.class), indexNameExpressionResolver, new IndexingPressure(Settings.EMPTY), diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index 564cf74697194..6815d634292a4 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateApplier; @@ -36,6 +37,7 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; @@ -91,6 +93,7 @@ public class TransportBulkActionIngestTests extends ESTestCase { private static final Settings SETTINGS = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), true).build(); private static final Thread DUMMY_WRITE_THREAD = new Thread(ThreadPool.Names.WRITE); + private FeatureService mockFeatureService; /** Services needed by bulk action */ TransportService transportService; @@ -140,7 +143,8 @@ class TestTransportBulkAction extends TransportBulkAction { transportService, clusterService, ingestService, - null, + mockFeatureService, + new NodeClient(Settings.EMPTY, threadPool), new ActionFilters(Collections.emptySet()), TestIndexNameExpressionResolver.newInstance(), new IndexingPressure(SETTINGS), @@ -203,6 +207,8 @@ public void setupAction() throws IOException { when(nodes.getIngestNodes()).thenReturn(ingestNodes); ClusterState state = mock(ClusterState.class); when(state.getNodes()).thenReturn(nodes); + mockFeatureService = mock(FeatureService.class); + when(mockFeatureService.clusterHasFeature(any(), any())).thenReturn(true); Metadata metadata = Metadata.builder() .indices( Map.of( diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index 6f3767892e7a4..1a16d9083df55 100644 --- 
a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; @@ -34,6 +35,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -65,6 +67,9 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.junit.Assume.assumeThat; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class TransportBulkActionTests extends ESTestCase { @@ -74,6 +79,7 @@ public class TransportBulkActionTests extends ESTestCase { private TestThreadPool threadPool; private TestTransportBulkAction bulkAction; + private FeatureService mockFeatureService; class TestTransportBulkAction extends TransportBulkAction { @@ -87,7 +93,8 @@ class TestTransportBulkAction extends TransportBulkAction { transportService, clusterService, null, - null, + mockFeatureService, + new NodeClient(Settings.EMPTY, TransportBulkActionTests.this.threadPool), new ActionFilters(Collections.emptySet()), new Resolver(), new IndexingPressure(Settings.EMPTY), @@ -132,6 +139,8 @@ public void setUp() throws Exception { ); transportService.start(); transportService.acceptIncomingRequests(); + mockFeatureService = mock(FeatureService.class); + when(mockFeatureService.clusterHasFeature(any(), any())).thenReturn(true); bulkAction = new TestTransportBulkAction(); } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index d4c5fc09e821f..cb9bdd1f3a827 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -247,6 +247,7 @@ static class TestTransportBulkAction extends TransportBulkAction { transportService, clusterService, null, + null, client, actionFilters, indexNameExpressionResolver, diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java index 0a3adaf54a8ea..2657bdef8c09d 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.SimulateIndexResponse; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNode; import 
org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.service.ClusterService; @@ -74,6 +75,7 @@ class TestTransportSimulateBulkAction extends TransportSimulateBulkAction { clusterService, null, null, + new NodeClient(Settings.EMPTY, TransportSimulateBulkActionTests.this.threadPool), new ActionFilters(Collections.emptySet()), new TransportBulkActionTookTests.Resolver(), new IndexingPressure(Settings.EMPTY), diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 26f41b932f98f..0d38cdfcafd2b 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -222,7 +222,9 @@ import static org.hamcrest.Matchers.iterableWithSize; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class SnapshotResiliencyTests extends ESTestCase { @@ -1649,6 +1651,8 @@ protected void connectToNodesAndWait(ClusterState newClusterState) { } ); recoverySettings = new RecoverySettings(settings, clusterSettings); + FeatureService mockFeatureService = mock(FeatureService.class); + when(mockFeatureService.clusterHasFeature(any(), any())).thenReturn(true); mockTransport = new DisruptableMockTransport(node, deterministicTaskQueue) { @Override protected ConnectionStatus getConnectionStatus(DiscoveryNode destination) { @@ -1938,6 +1942,7 @@ protected void assertSnapshotOrGenericThread() { null, () -> DocumentParsingObserver.EMPTY_INSTANCE ), + mockFeatureService, client, actionFilters, indexNameExpressionResolver, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java index 6162fb36c0497..43863d1b203d1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/InternalUsers.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction; import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockAction; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; +import org.elasticsearch.action.admin.indices.rollover.LazyRolloverAction; import org.elasticsearch.action.admin.indices.rollover.RolloverAction; import org.elasticsearch.action.admin.indices.settings.put.TransportUpdateSettingsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; @@ -179,6 +180,28 @@ public class InternalUsers { ) ); + /** + * Internal user that can rollover an index/data stream. + */ + public static final InternalUser LAZY_ROLLOVER_USER = new InternalUser( + UsernamesField.LAZY_ROLLOVER_NAME, + new RoleDescriptor( + UsernamesField.LAZY_ROLLOVER_ROLE, + new String[] {}, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder() + .indices("*") + .privileges(LazyRolloverAction.NAME) + .allowRestrictedIndices(true) + .build() }, + null, + null, + new String[] {}, + MetadataUtils.DEFAULT_RESERVED_METADATA, + Map.of() + ) + ); + /** * internal user that manages synonyms via the Synonyms API. 
Operates on the synonyms system index */ @@ -211,7 +234,8 @@ public class InternalUsers { ASYNC_SEARCH_USER, STORAGE_USER, DATA_STREAM_LIFECYCLE_USER, - SYNONYMS_USER + SYNONYMS_USER, + LAZY_ROLLOVER_USER ).collect(Collectors.toUnmodifiableMap(InternalUser::principal, Function.identity())); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java index 821d222bb930c..22e3c2df22ec3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java @@ -38,6 +38,8 @@ public final class UsernamesField { public static final String REMOTE_MONITORING_NAME = "remote_monitoring_user"; public static final String REMOTE_MONITORING_COLLECTION_ROLE = "remote_monitoring_collector"; public static final String REMOTE_MONITORING_INDEXING_ROLE = "remote_monitoring_agent"; + public static final String LAZY_ROLLOVER_NAME = "_lazy_rollover"; + public static final String LAZY_ROLLOVER_ROLE = "_lazy_rollover"; private UsernamesField() {} } diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index c99b779df5a54..7ae9038b95b70 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -461,6 +461,7 @@ public class Constants { "indices:admin/block/add", "indices:admin/block/add[s]", "indices:admin/cache/clear", + "indices:admin/data_stream/lazy_rollover", "indices:internal/admin/ccr/restore/file_chunk/get", "indices:internal/admin/ccr/restore/session/clear", "indices:internal/admin/ccr/restore/session/put", diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java index 798396c249e75..629a1a476995f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java @@ -20,6 +20,7 @@ import java.util.function.Predicate; import static org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskAction.TASKS_ORIGIN; +import static org.elasticsearch.action.bulk.TransportBulkAction.LAZY_ROLLOVER_ORIGIN; import static org.elasticsearch.action.support.replication.PostWriteRefresh.POST_WRITE_REFRESH_ORIGIN; import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.DATA_STREAM_LIFECYCLE_ORIGIN; import static org.elasticsearch.ingest.IngestService.INGEST_ORIGIN; @@ -134,6 +135,9 @@ public static void switchUserBasedOnActionOriginAndExecute( case DATA_STREAM_LIFECYCLE_ORIGIN: securityContext.executeAsInternalUser(InternalUsers.DATA_STREAM_LIFECYCLE_USER, version, consumer); break; + case LAZY_ROLLOVER_ORIGIN: + securityContext.executeAsInternalUser(InternalUsers.LAZY_ROLLOVER_USER, version, consumer); + break; case WATCHER_ORIGIN: case ML_ORIGIN: case MONITORING_ORIGIN: From 
db4d31ddb412559fcdc9eecf74e4dab79c8d5664 Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Wed, 7 Feb 2024 12:50:57 +0100 Subject: [PATCH 100/106] Improve exception handling for stateless realtime-get/mget (#105028) Relates #105003, ES-5727 --- .../action/get/TransportGetAction.java | 28 +++++++++---------- .../get/TransportShardMultiGetAction.java | 27 ++++++++---------- 2 files changed, 25 insertions(+), 30 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index d26545fd8acca..d3d19fe1714ba 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -192,12 +192,7 @@ private void handleGetOnUnpromotableShard(GetRequest request, IndexShard indexSh throws IOException { ShardId shardId = indexShard.shardId(); if (request.refresh()) { - var node = getCurrentNodeOfPrimary(clusterService.state(), shardId); - if (node == null) { - listener.onFailure(new NoShardAvailableActionException(shardId, "primary shard is not active")); - return; - } - logger.trace("send refresh action for shard {} to node {}", shardId, node.getId()); + logger.trace("send refresh action for shard {}", shardId); var refreshRequest = new BasicReplicationRequest(shardId); refreshRequest.setParentTask(request.getParentTask()); client.executeLocally( @@ -230,7 +225,14 @@ private void getFromTranslog( ClusterStateObserver observer, ActionListener listener ) { - tryGetFromTranslog(request, indexShard, state, listener.delegateResponse((l, e) -> { + DiscoveryNode node; + try { + node = getCurrentNodeOfPrimary(state, indexShard.shardId()); + } catch (Exception e) { + listener.onFailure(e); + return; + } + final var retryingListener = listener.delegateResponse((l, e) -> { final var cause = ExceptionsHelper.unwrapCause(e); logger.debug("get_from_translog failed", cause); if (cause instanceof ShardNotFoundException || cause instanceof IndexNotFoundException) { @@ -254,16 +256,12 @@ public void onTimeout(TimeValue timeout) { } else { l.onFailure(e); } - })); + }); + tryGetFromTranslog(request, indexShard, node, retryingListener); } - private void tryGetFromTranslog(GetRequest request, IndexShard indexShard, ClusterState state, ActionListener listener) { + private void tryGetFromTranslog(GetRequest request, IndexShard indexShard, DiscoveryNode node, ActionListener listener) { ShardId shardId = indexShard.shardId(); - var node = getCurrentNodeOfPrimary(state, shardId); - if (node == null) { - listener.onFailure(new NoShardAvailableActionException(shardId, "primary shard is not active")); - return; - } TransportGetFromTranslogAction.Request getFromTranslogRequest = new TransportGetFromTranslogAction.Request(request, shardId); getFromTranslogRequest.setParentTask(request.getParentTask()); transportService.sendRequest( @@ -300,7 +298,7 @@ private void tryGetFromTranslog(GetRequest request, IndexShard indexShard, Clust static DiscoveryNode getCurrentNodeOfPrimary(ClusterState clusterState, ShardId shardId) { var shardRoutingTable = clusterState.routingTable().shardRoutingTable(shardId); if (shardRoutingTable.primaryShard() == null || shardRoutingTable.primaryShard().active() == false) { - return null; + throw new NoShardAvailableActionException(shardId, "primary shard is not active"); } DiscoveryNode node = clusterState.nodes().get(shardRoutingTable.primaryShard().currentNodeId()); assert node != null; diff 
--git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index 2e558b42d7e2b..4fd3dab11355b 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.admin.indices.refresh.TransportShardRefreshAction; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; @@ -172,12 +171,7 @@ private void handleMultiGetOnUnpromotableShard( ) throws IOException { ShardId shardId = indexShard.shardId(); if (request.refresh()) { - var node = getCurrentNodeOfPrimary(clusterService.state(), shardId); - if (node == null) { - listener.onFailure(new NoShardAvailableActionException(shardId, "primary shard is not active")); - return; - } - logger.trace("send refresh action for shard {} to node {}", shardId, node.getId()); + logger.trace("send refresh action for shard {}", shardId); var refreshRequest = new BasicReplicationRequest(shardId); refreshRequest.setParentTask(request.getParentTask()); client.executeLocally( @@ -210,7 +204,14 @@ private void shardMultiGetFromTranslog( ClusterStateObserver observer, ActionListener listener ) { - tryShardMultiGetFromTranslog(request, indexShard, state, listener.delegateResponse((l, e) -> { + DiscoveryNode node; + try { + node = getCurrentNodeOfPrimary(state, indexShard.shardId()); + } catch (Exception e) { + listener.onFailure(e); + return; + } + final var retryingListener = listener.delegateResponse((l, e) -> { final var cause = ExceptionsHelper.unwrapCause(e); logger.debug("mget_from_translog[shard] failed", cause); if (cause instanceof ShardNotFoundException || cause instanceof IndexNotFoundException) { @@ -234,21 +235,17 @@ public void onTimeout(TimeValue timeout) { } else { l.onFailure(e); } - })); + }); + tryShardMultiGetFromTranslog(request, indexShard, node, retryingListener); } private void tryShardMultiGetFromTranslog( MultiGetShardRequest request, IndexShard indexShard, - ClusterState state, + DiscoveryNode node, ActionListener listener ) { final var shardId = indexShard.shardId(); - var node = getCurrentNodeOfPrimary(state, shardId); - if (node == null) { - listener.onFailure(new NoShardAvailableActionException(shardId, "primary shard is not active")); - return; - } TransportShardMultiGetFomTranslogAction.Request mgetFromTranslogRequest = new TransportShardMultiGetFomTranslogAction.Request( request, shardId From 25dd12df3b10dd19ffbd4a5ca299f295b0dcec2b Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 7 Feb 2024 12:11:42 +0000 Subject: [PATCH 101/106] AwaitsFix for #105236 --- server/src/test/java/org/elasticsearch/node/NodeTests.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/node/NodeTests.java b/server/src/test/java/org/elasticsearch/node/NodeTests.java index 117dba292c8ab..986ed9184f3e7 100644 --- a/server/src/test/java/org/elasticsearch/node/NodeTests.java +++ b/server/src/test/java/org/elasticsearch/node/NodeTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.node; import org.apache.lucene.tests.util.LuceneTestCase; +import 
org.apache.lucene.util.Constants; import org.apache.lucene.util.SetOnce; import org.elasticsearch.bootstrap.BootstrapCheck; import org.elasticsearch.bootstrap.BootstrapContext; @@ -303,6 +304,7 @@ public void testCloseOnInterruptibleTask() throws Exception { } public void testCloseOnLeakedIndexReaderReference() throws Exception { + assumeFalse("AwaitsFix https://github.com/elastic/elasticsearch/issues/105236", Constants.MAC_OS_X); Node node = new MockNode(baseSettings().build(), basePlugins()); node.start(); IndicesService indicesService = node.injector().getInstance(IndicesService.class); @@ -318,6 +320,7 @@ public void testCloseOnLeakedIndexReaderReference() throws Exception { } public void testCloseOnLeakedStoreReference() throws Exception { + assumeFalse("AwaitsFix https://github.com/elastic/elasticsearch/issues/105236", Constants.MAC_OS_X); Node node = new MockNode(baseSettings().build(), basePlugins()); node.start(); IndicesService indicesService = node.injector().getInstance(IndicesService.class); From baf8b5ae38242773702c0fc59557d82811553336 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 7 Feb 2024 14:28:28 +0100 Subject: [PATCH 102/106] Fix a few downsample api issues (#105228) Improve downsampling by making the following changes: - Avoid an NPE and assert tripping when fetching the last processed tsid. - If the write block has been set, then there is no reason to start the downsample persistent tasks, since shard level downsampling has completed. Starting them anyway also causes ILM/DSL to get stuck on downsampling, so in this case shard level downsampling should be skipped. - Sometimes the source index may not be allocated yet on the node performing the shard level downsampling operation. This caused an NPE; with this PR, the shard level downsample now fails with a less disturbing error instead.
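
For illustration, a minimal sketch of the allocation issue described in the last bullet, assuming the standard IndicesService API; runShardDownsampling and the surrounding wiring are hypothetical placeholders, not the actual implementation:

    // indexService(...) returns null when the index is not allocated on this
    // node, so a later dereference trips an NPE. indexServiceSafe(...) throws
    // IndexNotFoundException instead, which the task can catch and surface as
    // a regular task failure.
    try {
        IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
        runShardDownsampling(indexService, shardId); // hypothetical placeholder
    } catch (IndexNotFoundException e) {
        markAsFailed(task, e); // fail the persistent task instead of crashing
    }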
Additionally unmute DataStreamLifecycleDownsampleDisruptionIT#testDataStreamLifecycleDownsampleRollingRestart Relates to #105068 --- docs/changelog/105228.yaml | 6 ++ ...StreamLifecycleDownsampleDisruptionIT.java | 7 ++- ...DownsampleShardPersistentTaskExecutor.java | 55 ++++++++++--------- .../downsample/TransportDownsampleAction.java | 18 +++++- 4 files changed, 56 insertions(+), 30 deletions(-) create mode 100644 docs/changelog/105228.yaml diff --git a/docs/changelog/105228.yaml b/docs/changelog/105228.yaml new file mode 100644 index 0000000000000..7526a3caa81d9 --- /dev/null +++ b/docs/changelog/105228.yaml @@ -0,0 +1,6 @@ +pr: 105228 +summary: Downsampling better handle if source index isn't allocated and fix bug in + retrieving last processed tsid +area: Downsampling +type: bug +issues: [] diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java index 8d45d66702bd0..76cc8308a4703 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java @@ -14,6 +14,7 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.downsample.DownsampleConfig; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.metadata.DataStreamLifecycle; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; @@ -56,7 +57,6 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { return settings.build(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/105068") @TestLogging(value = "org.elasticsearch.datastreams.lifecycle:TRACE", reason = "debugging") public void testDataStreamLifecycleDownsampleRollingRestart() throws Exception { final InternalTestCluster cluster = internalCluster(); @@ -129,13 +129,14 @@ public boolean validateClusterForming() { waitUntil(() -> getClusterPendingTasks(cluster.client()).pendingTasks().isEmpty(), 60, TimeUnit.SECONDS); ensureStableCluster(cluster.numDataAndMasterNodes()); - final String targetIndex = "downsample-5m-" + sourceIndex; + // if the source index has already been downsampled and moved into the data stream just use its name directly + final String targetIndex = sourceIndex.startsWith("downsample-5m-") ? 
sourceIndex : "downsample-5m-" + sourceIndex; assertBusy(() -> { try { GetSettingsResponse getSettingsResponse = cluster.client() .admin() .indices() - .getSettings(new GetSettingsRequest().indices(targetIndex)) + .getSettings(new GetSettingsRequest().indices(targetIndex).indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN)) .actionGet(); Settings indexSettings = getSettingsResponse.getIndexToSettings().get(targetIndex); assertThat(indexSettings, is(notNullValue())); diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java index 6fa09ef2175c4..fbef15d4c24c7 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -26,6 +27,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotFoundException; @@ -68,13 +70,19 @@ protected void nodeOperation( final SearchRequest searchRequest = new SearchRequest(params.downsampleIndex()); searchRequest.source().sort(TimeSeriesIdFieldMapper.NAME, SortOrder.DESC).size(1); searchRequest.preference("_shards:" + params.shardId().id()); - client.search( - searchRequest, - ActionListener.wrap( - searchResponse -> delegate(task, params, searchResponse.getHits().getHits()), - e -> delegate(task, params, new SearchHit[] {}) - ) - ); + client.search(searchRequest, ActionListener.wrap(searchResponse -> { + delegate(task, params, extractTsId(searchResponse.getHits().getHits())); + }, e -> delegate(task, params, null))); + } + + private static BytesRef extractTsId(SearchHit[] lastDownsampleTsidHits) { + if (lastDownsampleTsidHits.length == 0) { + return null; + } else { + var searchHit = Arrays.stream(lastDownsampleTsidHits).findFirst().get(); + var field = searchHit.field("_tsid"); + return field != null ? 
field.getValue() : null; + } } @Override @@ -154,15 +162,11 @@ public String getExecutor() { return ThreadPool.Names.SAME; } - private void delegate( - final AllocatedPersistentTask task, - final DownsampleShardTaskParams params, - final SearchHit[] lastDownsampledTsidHits - ) { + private void delegate(final AllocatedPersistentTask task, final DownsampleShardTaskParams params, final BytesRef lastDownsampleTsid) { DownsampleShardTask downsampleShardTask = (DownsampleShardTask) task; client.execute( DelegatingAction.INSTANCE, - new DelegatingAction.Request(downsampleShardTask, lastDownsampledTsidHits, params), + new DelegatingAction.Request(downsampleShardTask, lastDownsampleTsid, params), ActionListener.wrap(empty -> {}, e -> { LOGGER.error("error while delegating", e); markAsFailed(downsampleShardTask, e); @@ -175,7 +179,7 @@ static void realNodeOperation( IndicesService indicesService, DownsampleShardTask task, DownsampleShardTaskParams params, - SearchHit[] lastDownsampleTsidHits + BytesRef lastDownsampledTsid ) { client.threadPool().executor(Downsample.DOWNSAMPLE_TASK_THREAD_POOL_NAME).execute(new AbstractRunnable() { @Override @@ -185,17 +189,15 @@ public void onFailure(Exception e) { @Override protected void doRun() throws Exception { - final var initialState = lastDownsampleTsidHits.length == 0 - ? new DownsampleShardPersistentTaskState(DownsampleShardIndexerStatus.INITIALIZED, null) - : new DownsampleShardPersistentTaskState( - DownsampleShardIndexerStatus.STARTED, - Arrays.stream(lastDownsampleTsidHits).findFirst().get().field("_tsid").getValue() - ); + final var initialState = new DownsampleShardPersistentTaskState( + DownsampleShardIndexerStatus.INITIALIZED, + lastDownsampledTsid + ); try { final var downsampleShardIndexer = new DownsampleShardIndexer( task, client, - indicesService.indexService(params.shardId().getIndex()), + indicesService.indexServiceSafe(params.shardId().getIndex()), params.shardId(), params.downsampleIndex(), params.downsampleConfig(), @@ -216,6 +218,9 @@ protected void doRun() throws Exception { ); markAsFailed(task, e); } + } catch (IndexNotFoundException e) { + LOGGER.error("Downsampling task [" + task.getPersistentTaskId() + " failing because source index not assigned"); + markAsFailed(task, e); } catch (final Exception e) { LOGGER.error("Downsampling task [" + task.getPersistentTaskId() + " non-retriable failure [" + e.getMessage() + "]"); markAsFailed(task, e); @@ -248,12 +253,12 @@ private DelegatingAction() { public static class Request extends ActionRequest implements IndicesRequest { private final DownsampleShardTask task; - private final SearchHit[] lastDownsampleTsidHits; + private final BytesRef lastDownsampleTsid; private final DownsampleShardTaskParams params; - public Request(DownsampleShardTask task, SearchHit[] lastDownsampleTsidHits, DownsampleShardTaskParams params) { + public Request(DownsampleShardTask task, BytesRef lastDownsampleTsid, DownsampleShardTaskParams params) { this.task = task; - this.lastDownsampleTsidHits = lastDownsampleTsidHits; + this.lastDownsampleTsid = lastDownsampleTsid; this.params = params; } @@ -292,7 +297,7 @@ public TA(TransportService transportService, ActionFilters actionFilters, Client @Override protected void doExecute(Task t, Request request, ActionListener listener) { - realNodeOperation(client, indicesService, request.task, request.params, request.lastDownsampleTsidHits); + realNodeOperation(client, indicesService, request.task, request.params, request.lastDownsampleTsid); 
listener.onResponse(ActionResponse.Empty.INSTANCE); } } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index c4b48bbb016ef..b761fcab1b6db 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -184,6 +184,7 @@ protected void masterOperation( ActionListener listener ) { String sourceIndexName = request.getSourceIndex(); + downsamplingInterval = request.getDownsampleConfig().getInterval().toString(); final IndicesAccessControl indicesAccessControl = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); if (indicesAccessControl != null) { @@ -234,6 +235,7 @@ protected void masterOperation( return; } + final TaskId parentTask = new TaskId(clusterService.localNode().getId(), task.getId()); // Shortcircuit if target index has been downsampled: final String downsampleIndexName = request.getTargetIndex(); IndexMetadata downsampleIndex = state.getMetadata().index(downsampleIndexName); @@ -247,6 +249,20 @@ protected void masterOperation( listener.onResponse(AcknowledgedResponse.TRUE); return; } + // If the write block has been set on the target index, it means that the shard level downsampling itself was successful, + // but the previous invocation failed later performing settings update, refresh or force merge. + // The write block is used as a signal to resume from the refresh part of the downsample api invocation. + if (downsampleIndex.getSettings().get(IndexMetadata.SETTING_BLOCKS_WRITE) != null) { + var refreshRequest = new RefreshRequest(downsampleIndexName); + refreshRequest.setParentTask(parentTask); + client.admin() + .indices() + .refresh( + refreshRequest, + new RefreshDownsampleIndexActionListener(listener, parentTask, downsampleIndexName, request.getWaitTimeout()) + ); + return; + } } try { MetadataCreateIndexService.validateIndexName(downsampleIndexName, state); @@ -266,7 +282,6 @@ protected void masterOperation( // At any point if there is an issue, delete the downsample index // 1. Extract source index mappings - final TaskId parentTask = new TaskId(clusterService.localNode().getId(), task.getId()); final GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(sourceIndexName); getMappingsRequest.setParentTask(parentTask); client.admin().indices().getMappings(getMappingsRequest, listener.delegateFailureAndWrap((delegate, getMappingsResponse) -> { @@ -285,7 +300,6 @@ protected void masterOperation( // Validate downsampling interval validateDownsamplingInterval(mapperService, request.getDownsampleConfig()); - downsamplingInterval = request.getDownsampleConfig().getInterval().toString(); final List dimensionFields = new ArrayList<>(); final List metricFields = new ArrayList<>(); From 9651cd7e262b5093f7c7bf41e066b613a534ce98 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Wed, 7 Feb 2024 14:39:38 +0100 Subject: [PATCH 103/106] [Profiling] Use plain arrays in stack traces (#105226) With this commit we refactor the internal representation of stacktraces to use plain arrays instead of lists for some of its properties.
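
In essence, the representation changes as follows (an abridged sketch; the complete field list is in the StackTrace diff below):

    // before: list-based fields, every element boxed
    List<Integer> addressOrLines;
    List<String> fileIds;

    // after: plain arrays, no per-element boxing
    int[] addressOrLines;
    String[] fileIds;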
The motivation behind this change is simplicity: * It avoids unnecessary boxing * We could eliminate a few redundant null checks because we use primitive types now in some places * We could slightly simplify runlength decoding --- .../profiling/GetStackTracesActionIT.java | 24 +++---- .../xpack/profiling/StackTrace.java | 66 +++++++++---------- .../TransportGetFlamegraphAction.java | 12 ++-- .../TransportGetStackTracesAction.java | 4 +- .../GetStackTracesResponseTests.java | 9 ++- .../xpack/profiling/StackTraceTests.java | 51 +++++++------- .../TransportGetFlamegraphActionTests.java | 14 ++-- 7 files changed, 86 insertions(+), 94 deletions(-) diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java index 6becc2eb6e385..a2274c952b4c3 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java @@ -29,10 +29,10 @@ public void testGetStackTracesUnfiltered() throws Exception { assertNotNull(response.getStackTraces()); // just do a high-level spot check. Decoding is tested in unit-tests StackTrace stackTrace = response.getStackTraces().get("L7kj7UvlKbT-vN73el4faQ"); - assertEquals(18, stackTrace.addressOrLines.size()); - assertEquals(18, stackTrace.fileIds.size()); - assertEquals(18, stackTrace.frameIds.size()); - assertEquals(18, stackTrace.typeIds.size()); + assertEquals(18, stackTrace.addressOrLines.length); + assertEquals(18, stackTrace.fileIds.length); + assertEquals(18, stackTrace.frameIds.length); + assertEquals(18, stackTrace.typeIds.length); assertEquals(0.0000048475146d, stackTrace.annualCO2Tons, 0.0000000001d); assertEquals(0.18834d, stackTrace.annualCostsUSD, 0.00001d); @@ -73,10 +73,10 @@ public void testGetStackTracesFromAPMWithMatchNoDownsampling() throws Exception assertNotNull(response.getStackTraces()); // just do a high-level spot check. Decoding is tested in unit-tests StackTrace stackTrace = response.getStackTraces().get("Ce77w10WeIDow3kd1jowlA"); - assertEquals(39, stackTrace.addressOrLines.size()); - assertEquals(39, stackTrace.fileIds.size()); - assertEquals(39, stackTrace.frameIds.size()); - assertEquals(39, stackTrace.typeIds.size()); + assertEquals(39, stackTrace.addressOrLines.length); + assertEquals(39, stackTrace.fileIds.length); + assertEquals(39, stackTrace.frameIds.length); + assertEquals(39, stackTrace.typeIds.length); assertTrue(stackTrace.annualCO2Tons > 0.0d); assertTrue(stackTrace.annualCostsUSD > 0.0d); @@ -139,10 +139,10 @@ public int hashCode() { assertNotNull(response.getStackTraces()); // just do a high-level spot check. 
Decoding is tested in unit-tests StackTrace stackTrace = response.getStackTraces().get("Ce77w10WeIDow3kd1jowlA"); - assertEquals(39, stackTrace.addressOrLines.size()); - assertEquals(39, stackTrace.fileIds.size()); - assertEquals(39, stackTrace.frameIds.size()); - assertEquals(39, stackTrace.typeIds.size()); + assertEquals(39, stackTrace.addressOrLines.length); + assertEquals(39, stackTrace.fileIds.length); + assertEquals(39, stackTrace.frameIds.length); + assertEquals(39, stackTrace.typeIds.length); assertTrue(stackTrace.annualCO2Tons > 0.0d); assertTrue(stackTrace.annualCostsUSD > 0.0d); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java index b417e267f12da..b039bf22110b1 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/StackTrace.java @@ -12,11 +12,8 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; -import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.function.Consumer; final class StackTrace implements ToXContentObject { @@ -25,20 +22,20 @@ final class StackTrace implements ToXContentObject { static final int NATIVE_FRAME_TYPE = 3; static final int KERNEL_FRAME_TYPE = 4; - List addressOrLines; - List fileIds; - List frameIds; - List typeIds; + int[] addressOrLines; + String[] fileIds; + String[] frameIds; + int[] typeIds; double annualCO2Tons; double annualCostsUSD; long count; StackTrace( - List addressOrLines, - List fileIds, - List frameIds, - List typeIds, + int[] addressOrLines, + String[] fileIds, + String[] frameIds, + int[] typeIds, double annualCO2Tons, double annualCostsUSD, long count @@ -84,8 +81,8 @@ final class StackTrace implements ToXContentObject { * @return Corresponding numbers that are encoded in the input. 
*/ // package-private for testing - static List runLengthDecodeBase64Url(String input, int size, int capacity) { - Integer[] output = new Integer[capacity]; + static int[] runLengthDecodeBase64Url(String input, int size, int capacity) { + int[] output = new int[capacity]; int multipleOf8 = size / 8; int remainder = size % 8; @@ -138,7 +135,6 @@ static List runLengthDecodeBase64Url(String input, int size, int capaci value = n & 0xff; Arrays.fill(output, j, j + count, value); - j += count; } else if (remainder == 3) { n = (charCodeAt(input, i) << 12) | (charCodeAt(input, i + 1) << 6) | charCodeAt(input, i + 2); n >>= 2; @@ -147,12 +143,8 @@ static List runLengthDecodeBase64Url(String input, int size, int capaci value = n & 0xff; Arrays.fill(output, j, j + count, value); - j += count; - } - if (j < capacity) { - Arrays.fill(output, j, capacity, 0); } - return Arrays.asList(output); + return output; } // package-private for testing @@ -195,9 +187,9 @@ public static StackTrace fromSource(Map source) { String inputFrameTypes = ObjectPath.eval(PATH_FRAME_TYPES, source); int countsFrameIDs = inputFrameIDs.length() / BASE64_FRAME_ID_LENGTH; - List fileIDs = new ArrayList<>(countsFrameIDs); - List frameIDs = new ArrayList<>(countsFrameIDs); - List addressOrLines = new ArrayList<>(countsFrameIDs); + String[] fileIDs = new String[countsFrameIDs]; + String[] frameIDs = new String[countsFrameIDs]; + int[] addressOrLines = new int[countsFrameIDs]; // Step 1: Convert the base64-encoded frameID list into two separate // lists (frame IDs and file IDs), both of which are also base64-encoded. @@ -210,22 +202,22 @@ public static StackTrace fromSource(Map source) { // address (see diagram in definition of EncodedStackTrace). for (int i = 0, pos = 0; i < countsFrameIDs; i++, pos += BASE64_FRAME_ID_LENGTH) { String frameID = inputFrameIDs.substring(pos, pos + BASE64_FRAME_ID_LENGTH); - frameIDs.add(frameID); - fileIDs.add(getFileIDFromStackFrameID(frameID)); - addressOrLines.add(getAddressFromStackFrameID(frameID)); + frameIDs[i] = frameID; + fileIDs[i] = getFileIDFromStackFrameID(frameID); + addressOrLines[i] = getAddressFromStackFrameID(frameID); } // Step 2: Convert the run-length byte encoding into a list of uint8s. - List typeIDs = runLengthDecodeBase64Url(inputFrameTypes, inputFrameTypes.length(), countsFrameIDs); + int[] typeIDs = runLengthDecodeBase64Url(inputFrameTypes, inputFrameTypes.length(), countsFrameIDs); return new StackTrace(addressOrLines, fileIDs, frameIDs, typeIDs, 0, 0, 0); } public void forNativeAndKernelFrames(Consumer consumer) { - for (int i = 0; i < this.fileIds.size(); i++) { - Integer frameType = this.typeIds.get(i); - if (frameType != null && (frameType == NATIVE_FRAME_TYPE || frameType == KERNEL_FRAME_TYPE)) { - consumer.accept(this.fileIds.get(i)); + for (int i = 0; i < this.fileIds.length; i++) { + int frameType = this.typeIds[i]; + if (frameType == NATIVE_FRAME_TYPE || frameType == KERNEL_FRAME_TYPE) { + consumer.accept(this.fileIds[i]); } } } @@ -251,16 +243,20 @@ public boolean equals(Object o) { return false; } StackTrace that = (StackTrace) o; - return addressOrLines.equals(that.addressOrLines) - && fileIds.equals(that.fileIds) - && frameIds.equals(that.frameIds) - && typeIds.equals(that.typeIds); + return Arrays.equals(addressOrLines, that.addressOrLines) + && Arrays.equals(fileIds, that.fileIds) + && Arrays.equals(frameIds, that.frameIds) + && Arrays.equals(typeIds, that.typeIds); // Don't compare metadata like annualized co2, annualized costs and count. 
} // Don't hash metadata like annualized co2, annualized costs and count. @Override public int hashCode() { - return Objects.hash(addressOrLines, fileIds, frameIds, typeIds); + int result = Arrays.hashCode(addressOrLines); + result = 31 * result + Arrays.hashCode(fileIds); + result = 31 * result + Arrays.hashCode(frameIds); + result = 31 * result + Arrays.hashCode(typeIds); + return result; } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java index dd78d6f1815f5..39b73db41aeef 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java @@ -91,12 +91,12 @@ static GetFlamegraphResponse buildFlamegraph(GetStackTracesResponse response) { builder.addAnnualCostsUSDInclusive(0, annualCostsUSD); builder.addAnnualCostsUSDExclusive(0, 0.0d); - int frameCount = stackTrace.frameIds.size(); + int frameCount = stackTrace.frameIds.length; for (int i = 0; i < frameCount; i++) { - String frameId = stackTrace.frameIds.get(i); - String fileId = stackTrace.fileIds.get(i); - Integer frameType = stackTrace.typeIds.get(i); - Integer addressOrLine = stackTrace.addressOrLines.get(i); + String frameId = stackTrace.frameIds[i]; + String fileId = stackTrace.fileIds[i]; + int frameType = stackTrace.typeIds[i]; + int addressOrLine = stackTrace.addressOrLines[i]; StackFrame stackFrame = response.getStackFrames().getOrDefault(frameId, EMPTY_STACKFRAME); String executable = response.getExecutables().getOrDefault(fileId, ""); final boolean isLeafFrame = i == frameCount - 1; @@ -199,7 +199,7 @@ public int addNode( int frameType, boolean inline, String fileName, - Integer addressOrLine, + int addressOrLine, String functionName, int functionOffset, String sourceFileName, diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index 2674893c2382f..cbb6f92fb417e 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ -567,8 +567,8 @@ public void onStackTraceResponse(MultiGetResponse multiGetItemResponses) { StackTrace stacktrace = StackTrace.fromSource(trace.getResponse().getSource()); // Guard against concurrent access and ensure we only handle each item once if (stackTracePerId.putIfAbsent(id, stacktrace) == null) { - totalFrames.addAndGet(stacktrace.frameIds.size()); - stackFrameIds.addAll(stacktrace.frameIds); + totalFrames.addAndGet(stacktrace.frameIds.length); + stackFrameIds.addAll(List.of(stacktrace.frameIds)); stacktrace.forNativeAndKernelFrames(e -> executableIds.add(e)); } } diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java index 99a34719f96c9..3ebd2ef6a8aeb 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java +++ 
b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import org.elasticsearch.test.ESTestCase; -import java.util.List; import java.util.Map; public class GetStackTracesResponseTests extends ESTestCase { @@ -25,10 +24,10 @@ private GetStackTracesResponse createTestInstance() { Map.of( "QjoLteG7HX3VUUXr-J4kHQ", new StackTrace( - List.of(1083999), - List.of("QCCDqjSg3bMK1C4YRK6Tiw"), - List.of("QCCDqjSg3bMK1C4YRK6TiwAAAAAAEIpf"), - List.of(2), + new int[] { 1083999 }, + new String[] { "QCCDqjSg3bMK1C4YRK6Tiw" }, + new String[] { "QCCDqjSg3bMK1C4YRK6TiwAAAAAAEIpf" }, + new int[] { 2 }, 0.3d, 2.7d, 1 diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java index 4765d23bd30d0..4f583b55f18f7 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/StackTraceTests.java @@ -16,8 +16,7 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; +import java.util.Arrays; import java.util.Map; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; @@ -33,30 +32,30 @@ public void testDecodeFrameId() { public void testRunlengthDecodeUniqueValues() { // 0 - 9 (reversed) String encodedFrameTypes = "AQkBCAEHAQYBBQEEAQMBAgEBAQA"; - List actual = StackTrace.runLengthDecodeBase64Url(encodedFrameTypes, encodedFrameTypes.length(), 10); - assertEquals(List.of(9, 8, 7, 6, 5, 4, 3, 2, 1, 0), actual); + int[] actual = StackTrace.runLengthDecodeBase64Url(encodedFrameTypes, encodedFrameTypes.length(), 10); + assertArrayEquals(new int[] { 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 }, actual); } public void testRunlengthDecodeSingleValue() { // "4", repeated ten times String encodedFrameTypes = "CgQ"; - List actual = StackTrace.runLengthDecodeBase64Url(encodedFrameTypes, encodedFrameTypes.length(), 10); - assertEquals(List.of(4, 4, 4, 4, 4, 4, 4, 4, 4, 4), actual); + int[] actual = StackTrace.runLengthDecodeBase64Url(encodedFrameTypes, encodedFrameTypes.length(), 10); + assertArrayEquals(new int[] { 4, 4, 4, 4, 4, 4, 4, 4, 4, 4 }, actual); } public void testRunlengthDecodeFillsGap() { // "2", repeated three times String encodedFrameTypes = "AwI"; - List actual = StackTrace.runLengthDecodeBase64Url(encodedFrameTypes, encodedFrameTypes.length(), 5); + int[] actual = StackTrace.runLengthDecodeBase64Url(encodedFrameTypes, encodedFrameTypes.length(), 5); // zeroes should be appended for the last two values which are not present in the encoded representation. 
- assertEquals(List.of(2, 2, 2, 0, 0), actual); + assertArrayEquals(new int[] { 2, 2, 2, 0, 0 }, actual); } public void testRunlengthDecodeMixedValue() { // 4 String encodedFrameTypes = "BQADAg"; - List actual = StackTrace.runLengthDecodeBase64Url(encodedFrameTypes, encodedFrameTypes.length(), 8); - assertEquals(List.of(0, 0, 0, 0, 0, 2, 2, 2), actual); + int[] actual = StackTrace.runLengthDecodeBase64Url(encodedFrameTypes, encodedFrameTypes.length(), 8); + assertArrayEquals(new int[] { 0, 0, 0, 0, 0, 2, 2, 2 }, actual); } public void testCreateFromSource() { @@ -73,10 +72,10 @@ public void testCreateFromSource() { ) ); // end::noformat - assertEquals(List.of("AAAAAAAAAAUAAAAAAAAB3gAAAAAAD67u"), stackTrace.frameIds); - assertEquals(List.of("AAAAAAAAAAUAAAAAAAAB3g"), stackTrace.fileIds); - assertEquals(List.of(1027822), stackTrace.addressOrLines); - assertEquals(List.of(2), stackTrace.typeIds); + assertArrayEquals(new String[] { "AAAAAAAAAAUAAAAAAAAB3gAAAAAAD67u" }, stackTrace.frameIds); + assertArrayEquals(new String[] { "AAAAAAAAAAUAAAAAAAAB3g" }, stackTrace.fileIds); + assertArrayEquals(new int[] { 1027822 }, stackTrace.addressOrLines); + assertArrayEquals(new int[] { 2 }, stackTrace.typeIds); } public void testToXContent() throws IOException { @@ -94,10 +93,10 @@ public void testToXContent() throws IOException { XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType); StackTrace stackTrace = new StackTrace( - List.of(1027822), - List.of("AAAAAAAAAAUAAAAAAAAB3g"), - List.of("AAAAAAAAAAUAAAAAAAAB3gAAAAAAD67u"), - List.of(2), + new int[] { 1027822 }, + new String[] { "AAAAAAAAAAUAAAAAAAAB3g" }, + new String[] { "AAAAAAAAAAUAAAAAAAAB3gAAAAAAD67u" }, + new int[] { 2 }, 0.3d, 2.7d, 1 @@ -109,10 +108,10 @@ public void testToXContent() throws IOException { public void testEquality() { StackTrace stackTrace = new StackTrace( - List.of(102782), - List.of("AAAAAAAAAAUAAAAAAAAB3g"), - List.of("AAAAAAAAAAUAAAAAAAAB3gAAAAAAD67u"), - List.of(2), + new int[] { 102782 }, + new String[] { "AAAAAAAAAAUAAAAAAAAB3g" }, + new String[] { "AAAAAAAAAAUAAAAAAAAB3gAAAAAAD67u" }, + new int[] { 2 }, 0.3d, 2.7d, 1 @@ -121,10 +120,10 @@ public void testEquality() { EqualsHashCodeTestUtils.checkEqualsAndHashCode( stackTrace, (o -> new StackTrace( - new ArrayList<>(o.addressOrLines), - new ArrayList<>(o.fileIds), - new ArrayList<>(o.frameIds), - new ArrayList<>(o.typeIds), + Arrays.copyOf(o.addressOrLines, o.addressOrLines.length), + Arrays.copyOf(o.fileIds, o.fileIds.length), + Arrays.copyOf(o.frameIds, o.frameIds.length), + Arrays.copyOf(o.typeIds, o.typeIds.length), 0.3d, 2.7d, 1 diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java index 32735e5db935a..fd20ed04978f2 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java @@ -18,8 +18,8 @@ public void testCreateFlamegraph() { Map.of( "2buqP1GpF-TXYmL4USW8gA", new StackTrace( - List.of(12784352, 19334053, 19336161, 18795859, 18622708, 18619213, 12989721, 13658842, 16339645), - List.of( + new int[] { 12784352, 19334053, 19336161, 18795859, 18622708, 18619213, 12989721, 13658842, 16339645 }, + new String[] { "fr28zxcZ2UDasxYuu6dV-w", "fr28zxcZ2UDasxYuu6dV-w", "fr28zxcZ2UDasxYuu6dV-w", @@ 
-28,9 +28,8 @@ public void testCreateFlamegraph() { "fr28zxcZ2UDasxYuu6dV-w", "fr28zxcZ2UDasxYuu6dV-w", "fr28zxcZ2UDasxYuu6dV-w", - "fr28zxcZ2UDasxYuu6dV-w" - ), - List.of( + "fr28zxcZ2UDasxYuu6dV-w" }, + new String[] { "fr28zxcZ2UDasxYuu6dV-wAAAAAAwxLg", "fr28zxcZ2UDasxYuu6dV-wAAAAABJwOl", "fr28zxcZ2UDasxYuu6dV-wAAAAABJwvh", @@ -39,9 +38,8 @@ public void testCreateFlamegraph() { "fr28zxcZ2UDasxYuu6dV-wAAAAABHBtN", "fr28zxcZ2UDasxYuu6dV-wAAAAAAxjUZ", "fr28zxcZ2UDasxYuu6dV-wAAAAAA0Gra", - "fr28zxcZ2UDasxYuu6dV-wAAAAAA-VK9" - ), - List.of(3, 3, 3, 3, 3, 3, 3, 3, 3), + "fr28zxcZ2UDasxYuu6dV-wAAAAAA-VK9" }, + new int[] { 3, 3, 3, 3, 3, 3, 3, 3, 3 }, 0.3d, 2.7d, 1 From a58b2c2b05052cb98cb85c070dc0efeb69d29e5b Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Wed, 7 Feb 2024 15:00:38 +0100 Subject: [PATCH 104/106] Move doc-values classes needed by ST_INTERSECTS to server (#104980) * Move doc-values classes needed by ST_INTERSECTS to server These classes are needed by ESQL spatial queries, and are not licensed in a way that prevents this move. Since they depend on Lucene it is not possible to move them to a library. Instead they are moved to be co-located with the GeoPoint doc-values classes that already exist in server. * Moved to lucene package org.elasticsearch.lucene.spatial * Moved Geo/ShapeDocValuesQuery to server because it is Lucene-specific, and this gives us access to these classes from ESQL for Lucene pushdown of spatial queries. --- server/src/main/java/module-info.java | 1 + .../spatial}/BinaryShapeDocValuesField.java | 10 ++- .../CartesianShapeCoordinateEncoder.java | 7 +- .../CartesianShapeDocValuesQuery.java | 8 +-- .../spatial}/CartesianShapeIndexer.java | 9 +-- .../lucene/spatial}/CentroidCalculator.java | 7 +- .../lucene/spatial}/Component2DVisitor.java | 15 ++-- .../lucene/spatial}/CoordinateEncoder.java | 7 +- .../lucene/spatial}/DimensionalShapeType.java | 7 +- .../elasticsearch/lucene/spatial}/Extent.java | 11 +-- .../spatial}/GeoShapeCoordinateEncoder.java | 7 +- .../spatial}/GeometryDocValueReader.java | 17 ++--- .../spatial}/GeometryDocValueWriter.java | 7 +- .../spatial}/LatLonShapeDocValuesQuery.java | 12 ++-- .../lucene/spatial}/ShapeDocValuesQuery.java | 10 ++- .../lucene/spatial}/TriangleTreeReader.java | 15 ++-- .../lucene/spatial}/TriangleTreeVisitor.java | 13 ++-- .../lucene/spatial}/TriangleTreeWriter.java | 9 +-- .../CartesianCentroidCalculatorTests.java | 7 +- .../CartesianShapeCoordinateEncoderTests.java | 7 +- .../CartesianShapeDocValuesQueryTests.java | 8 +-- .../spatial}/CentroidCalculatorTests.java | 16 ++--- .../spatial/GeoCentroidCalculatorTests.java | 68 +++++++++++++++++++ .../GeoShapeCoordinateEncoderTests.java | 7 +- .../LatLonShapeDocValuesQueryTests.java | 8 +-- .../lucene/spatial}/TriangleTreeTests.java | 7 +- .../search/GeoShapeScriptDocValuesIT.java | 2 +- .../index/fielddata/CartesianShapeValues.java | 4 +- .../fielddata/Component2DRelationVisitor.java | 10 +-- .../fielddata/GeoShapeScriptDocValues.java | 5 +- .../index/fielddata/GeoShapeValues.java | 2 + .../index/fielddata/LabelPositionVisitor.java | 2 + .../spatial/index/fielddata/ShapeValues.java | 8 ++- .../index/fielddata/Tile2DVisitor.java | 9 ++- .../GeoShapeWithDocValuesFieldMapper.java | 4 +- .../index/mapper/ShapeFieldMapper.java | 4 +- .../index/query/ShapeQueryProcessor.java | 2 +- .../bucket/geogrid/GeoHexVisitor.java | 10 +-- .../CartesianShapeCentroidAggregator.java | 2 +- .../metrics/GeoShapeCentroidAggregator.java | 2 +- .../GeoShapeScriptFieldGeoShapeQuery.java | 10 +-- ...a
=> GeoCentroidCalculatorExtraTests.java} | 48 ++----------- .../fielddata/GeometryDocValueTests.java | 4 ++ .../LatLonGeometryRelationVisitorTests.java | 3 + .../fielddata/TestCoordinateEncoder.java | 46 ------------- .../index/fielddata/Tile2DVisitorTests.java | 39 +++++++++++ .../mapper/GeoShapeScriptMapperTests.java | 6 +- .../spatial/ingest/CircleProcessorTests.java | 2 +- .../bucket/geogrid/GeoHexVisitorTests.java | 4 +- .../geogrid/GeoShapeGeoGridTestCase.java | 2 +- ...CartesianShapeCentroidAggregatorTests.java | 4 +- .../GeoShapeCentroidAggregatorTests.java | 4 +- .../xpack/spatial/util/GeoTestUtils.java | 12 ++-- 53 files changed, 310 insertions(+), 240 deletions(-) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper => server/src/main/java/org/elasticsearch/lucene/spatial}/BinaryShapeDocValuesField.java (79%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/main/java/org/elasticsearch/lucene/spatial}/CartesianShapeCoordinateEncoder.java (85%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper => server/src/main/java/org/elasticsearch/lucene/spatial}/CartesianShapeDocValuesQuery.java (79%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper => server/src/main/java/org/elasticsearch/lucene/spatial}/CartesianShapeIndexer.java (92%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/main/java/org/elasticsearch/lucene/spatial}/CentroidCalculator.java (97%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/main/java/org/elasticsearch/lucene/spatial}/Component2DVisitor.java (95%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/main/java/org/elasticsearch/lucene/spatial}/CoordinateEncoder.java (78%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/main/java/org/elasticsearch/lucene/spatial}/DimensionalShapeType.java (82%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/main/java/org/elasticsearch/lucene/spatial}/Extent.java (96%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/main/java/org/elasticsearch/lucene/spatial}/GeoShapeCoordinateEncoder.java (85%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/main/java/org/elasticsearch/lucene/spatial}/GeometryDocValueReader.java (87%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/main/java/org/elasticsearch/lucene/spatial}/GeometryDocValueWriter.java (85%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper => server/src/main/java/org/elasticsearch/lucene/spatial}/LatLonShapeDocValuesQuery.java (73%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper => server/src/main/java/org/elasticsearch/lucene/spatial}/ShapeDocValuesQuery.java (96%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/main/java/org/elasticsearch/lucene/spatial}/TriangleTreeReader.java (90%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata => 
server/src/main/java/org/elasticsearch/lucene/spatial}/TriangleTreeVisitor.java (91%) rename {x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/main/java/org/elasticsearch/lucene/spatial}/TriangleTreeWriter.java (97%) rename {x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/test/java/org/elasticsearch/lucene/spatial}/CartesianCentroidCalculatorTests.java (88%) rename {x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/test/java/org/elasticsearch/lucene/spatial}/CartesianShapeCoordinateEncoderTests.java (90%) rename {x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper => server/src/test/java/org/elasticsearch/lucene/spatial}/CartesianShapeDocValuesQueryTests.java (96%) rename {x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/test/java/org/elasticsearch/lucene/spatial}/CentroidCalculatorTests.java (97%) create mode 100644 server/src/test/java/org/elasticsearch/lucene/spatial/GeoCentroidCalculatorTests.java rename {x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/test/java/org/elasticsearch/lucene/spatial}/GeoShapeCoordinateEncoderTests.java (91%) rename {x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper => server/src/test/java/org/elasticsearch/lucene/spatial}/LatLonShapeDocValuesQueryTests.java (96%) rename {x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata => server/src/test/java/org/elasticsearch/lucene/spatial}/TriangleTreeTests.java (92%) rename x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/{GeoCentroidCalculatorTests.java => GeoCentroidCalculatorExtraTests.java} (61%) delete mode 100644 x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/TestCoordinateEncoder.java diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 78086d28446b6..f099852a8f428 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -434,4 +434,5 @@ org.elasticsearch.shardhealth, org.elasticsearch.serverless.shardhealth, org.elasticsearch.serverless.apifiltering; + exports org.elasticsearch.lucene.spatial; } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/BinaryShapeDocValuesField.java b/server/src/main/java/org/elasticsearch/lucene/spatial/BinaryShapeDocValuesField.java similarity index 79% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/BinaryShapeDocValuesField.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/BinaryShapeDocValuesField.java index eb8567f91cafb..70ca08482c15a 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/BinaryShapeDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/BinaryShapeDocValuesField.java @@ -1,20 +1,18 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.mapper; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.mapper.CustomDocValuesField; -import org.elasticsearch.xpack.spatial.index.fielddata.CentroidCalculator; -import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; -import org.elasticsearch.xpack.spatial.index.fielddata.GeometryDocValueWriter; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeCoordinateEncoder.java b/server/src/main/java/org/elasticsearch/lucene/spatial/CartesianShapeCoordinateEncoder.java similarity index 85% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeCoordinateEncoder.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/CartesianShapeCoordinateEncoder.java index f8a7ebe8b150d..aa043f8c401be 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeCoordinateEncoder.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/CartesianShapeCoordinateEncoder.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.geo.XYEncodingUtils; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeDocValuesQuery.java b/server/src/main/java/org/elasticsearch/lucene/spatial/CartesianShapeDocValuesQuery.java similarity index 79% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeDocValuesQuery.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/CartesianShapeDocValuesQuery.java index a3c50d5cb6162..5d377ea97e21b 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeDocValuesQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/CartesianShapeDocValuesQuery.java @@ -1,16 +1,16 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
*/ -package org.elasticsearch.xpack.spatial.index.mapper; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.document.ShapeField; import org.apache.lucene.geo.Component2D; import org.apache.lucene.geo.XYGeometry; -import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; /** Lucene geometry query for {@link BinaryShapeDocValuesField}. */ public class CartesianShapeDocValuesQuery extends ShapeDocValuesQuery { diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeIndexer.java b/server/src/main/java/org/elasticsearch/lucene/spatial/CartesianShapeIndexer.java similarity index 92% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeIndexer.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/CartesianShapeIndexer.java index c23d63baa5791..ca18d2a854dbb 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeIndexer.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/CartesianShapeIndexer.java @@ -1,10 +1,11 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.mapper; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.document.XYShape; import org.apache.lucene.index.IndexableField; @@ -47,7 +48,7 @@ public List indexShape(Geometry shape) { return visitor.fields; } - private class LuceneGeometryVisitor implements GeometryVisitor { + private static class LuceneGeometryVisitor implements GeometryVisitor { private List fields = new ArrayList<>(); private String name; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CentroidCalculator.java b/server/src/main/java/org/elasticsearch/lucene/spatial/CentroidCalculator.java similarity index 97% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CentroidCalculator.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/CentroidCalculator.java index c00cfdba4d3c1..b63f650a539d8 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CentroidCalculator.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/CentroidCalculator.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
*/ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.elasticsearch.geometry.Circle; import org.elasticsearch.geometry.Geometry; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Component2DVisitor.java b/server/src/main/java/org/elasticsearch/lucene/spatial/Component2DVisitor.java similarity index 95% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Component2DVisitor.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/Component2DVisitor.java index b5804d18a4e6e..ea79f77c1a075 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Component2DVisitor.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/Component2DVisitor.java @@ -1,20 +1,21 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.document.ShapeField; import org.apache.lucene.geo.Component2D; import org.apache.lucene.index.PointValues; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.TriangleTreeDecodedVisitor; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.abFromTriangle; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.bcFromTriangle; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.caFromTriangle; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.TriangleTreeDecodedVisitor; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.abFromTriangle; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.bcFromTriangle; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.caFromTriangle; /** * A {@link TriangleTreeDecodedVisitor} implementation for {@link Component2D} geometries. diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CoordinateEncoder.java b/server/src/main/java/org/elasticsearch/lucene/spatial/CoordinateEncoder.java similarity index 78% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CoordinateEncoder.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/CoordinateEncoder.java index 1458282fc335d..e10687246277b 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CoordinateEncoder.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/CoordinateEncoder.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
*/ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; /** * Interface for classes that help encode double-valued spatial coordinates x/y to diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/DimensionalShapeType.java b/server/src/main/java/org/elasticsearch/lucene/spatial/DimensionalShapeType.java similarity index 82% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/DimensionalShapeType.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/DimensionalShapeType.java index 4cf8895893738..09be37653b14f 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/DimensionalShapeType.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/DimensionalShapeType.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.elasticsearch.common.io.stream.ByteArrayStreamInput; import org.elasticsearch.common.io.stream.BytesStreamOutput; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Extent.java b/server/src/main/java/org/elasticsearch/lucene/spatial/Extent.java similarity index 96% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Extent.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/Extent.java index 1c8be0e3b806a..a5d7a81410089 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Extent.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/Extent.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -16,7 +17,7 @@ /** * Object representing the extent of a geometry object within a {@link TriangleTreeWriter}. 
*/ -class Extent { +public class Extent { public int top; public int bottom; @@ -206,7 +207,7 @@ public static Extent fromPoint(int x, int y) { * @param topRightY the top-right y-coordinate * @return the extent of the two points */ - static Extent fromPoints(int bottomLeftX, int bottomLeftY, int topRightX, int topRightY) { + public static Extent fromPoints(int bottomLeftX, int bottomLeftY, int topRightX, int topRightY) { int negLeft = Integer.MAX_VALUE; int negRight = Integer.MIN_VALUE; int posLeft = Integer.MAX_VALUE; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeCoordinateEncoder.java b/server/src/main/java/org/elasticsearch/lucene/spatial/GeoShapeCoordinateEncoder.java similarity index 85% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeCoordinateEncoder.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/GeoShapeCoordinateEncoder.java index ead1b44abe51e..29067d41ac9d3 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeCoordinateEncoder.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/GeoShapeCoordinateEncoder.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.geo.GeoEncodingUtils; import org.elasticsearch.common.geo.GeoUtils; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueReader.java b/server/src/main/java/org/elasticsearch/lucene/spatial/GeometryDocValueReader.java similarity index 87% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueReader.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/GeometryDocValueReader.java index 16b655a1ad034..25c9a580f8dc1 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueReader.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/GeometryDocValueReader.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.ByteArrayStreamInput; @@ -60,7 +61,7 @@ public void reset(BytesRef bytesRef) throws IOException { /** * returns the {@link Extent} of this geometry. 
*/ - protected Extent getExtent() throws IOException { + public Extent getExtent() throws IOException { if (treeOffset == 0) { getSumCentroidWeight(); // skip CENTROID_HEADER + var-long sum-weight Extent.readFromCompressed(input, extent); @@ -74,7 +75,7 @@ protected Extent getExtent() throws IOException { /** * returns the encoded X coordinate of the centroid. */ - protected int getCentroidX() throws IOException { + public int getCentroidX() throws IOException { input.setPosition(docValueOffset + 0); return input.readInt(); } @@ -82,17 +83,17 @@ protected int getCentroidX() throws IOException { /** * returns the encoded Y coordinate of the centroid. */ - protected int getCentroidY() throws IOException { + public int getCentroidY() throws IOException { input.setPosition(docValueOffset + 4); return input.readInt(); } - protected DimensionalShapeType getDimensionalShapeType() { + public DimensionalShapeType getDimensionalShapeType() { input.setPosition(docValueOffset + 8); return DimensionalShapeType.readFrom(input); } - protected double getSumCentroidWeight() throws IOException { + public double getSumCentroidWeight() throws IOException { input.setPosition(docValueOffset + 9); return Double.longBitsToDouble(input.readVLong()); } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueWriter.java b/server/src/main/java/org/elasticsearch/lucene/spatial/GeometryDocValueWriter.java similarity index 85% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueWriter.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/GeometryDocValueWriter.java index d168ca9563b57..135bdb931bb84 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueWriter.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/GeometryDocValueWriter.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/LatLonShapeDocValuesQuery.java b/server/src/main/java/org/elasticsearch/lucene/spatial/LatLonShapeDocValuesQuery.java similarity index 73% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/LatLonShapeDocValuesQuery.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/LatLonShapeDocValuesQuery.java index 6926148f50314..b1d682e181e21 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/LatLonShapeDocValuesQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/LatLonShapeDocValuesQuery.java @@ -1,24 +1,24 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.mapper; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.document.ShapeField; import org.apache.lucene.geo.Component2D; import org.apache.lucene.geo.LatLonGeometry; import org.apache.lucene.geo.Rectangle; -import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; import java.util.List; /** Lucene geometry query for {@link BinaryShapeDocValuesField}. */ -class LatLonShapeDocValuesQuery extends ShapeDocValuesQuery { +public class LatLonShapeDocValuesQuery extends ShapeDocValuesQuery { - LatLonShapeDocValuesQuery(String field, ShapeField.QueryRelation relation, LatLonGeometry... geometries) { + public LatLonShapeDocValuesQuery(String field, ShapeField.QueryRelation relation, LatLonGeometry... geometries) { super(field, CoordinateEncoder.GEO, relation, geometries); } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeDocValuesQuery.java b/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java similarity index 96% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeDocValuesQuery.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java index 968ee86f3429e..6804901d9511e 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeDocValuesQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
*/ -package org.elasticsearch.xpack.spatial.index.mapper; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.document.ShapeField; import org.apache.lucene.geo.Component2D; @@ -22,9 +23,6 @@ import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; -import org.elasticsearch.xpack.spatial.index.fielddata.Component2DVisitor; -import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; -import org.elasticsearch.xpack.spatial.index.fielddata.GeometryDocValueReader; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeReader.java b/server/src/main/java/org/elasticsearch/lucene/spatial/TriangleTreeReader.java similarity index 90% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeReader.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/TriangleTreeReader.java index 941bffc7442df..5ed80d61a39d2 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeReader.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/TriangleTreeReader.java @@ -1,20 +1,21 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.elasticsearch.common.io.stream.ByteArrayStreamInput; import java.io.IOException; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeWriter.LEFT; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeWriter.LINE; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeWriter.POINT; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeWriter.RIGHT; +import static org.elasticsearch.lucene.spatial.TriangleTreeWriter.LEFT; +import static org.elasticsearch.lucene.spatial.TriangleTreeWriter.LINE; +import static org.elasticsearch.lucene.spatial.TriangleTreeWriter.POINT; +import static org.elasticsearch.lucene.spatial.TriangleTreeWriter.RIGHT; /** * A tree reader for a previous serialized {@link org.elasticsearch.geometry.Geometry} using diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeVisitor.java b/server/src/main/java/org/elasticsearch/lucene/spatial/TriangleTreeVisitor.java similarity index 91% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeVisitor.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/TriangleTreeVisitor.java index 5afb2862cfeea..3156203125f24 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeVisitor.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/TriangleTreeVisitor.java @@ -1,15 +1,16 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeWriter.AB_FROM_TRIANGLE; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeWriter.BC_FROM_TRIANGLE; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeWriter.CA_FROM_TRIANGLE; +import static org.elasticsearch.lucene.spatial.TriangleTreeWriter.AB_FROM_TRIANGLE; +import static org.elasticsearch.lucene.spatial.TriangleTreeWriter.BC_FROM_TRIANGLE; +import static org.elasticsearch.lucene.spatial.TriangleTreeWriter.CA_FROM_TRIANGLE; /** Visitor for triangle interval tree. * diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java b/server/src/main/java/org/elasticsearch/lucene/spatial/TriangleTreeWriter.java similarity index 97% rename from x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java rename to server/src/main/java/org/elasticsearch/lucene/spatial/TriangleTreeWriter.java index a69f0f6d73365..88b2de5f6e8e1 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeWriter.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/TriangleTreeWriter.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.document.ShapeField; import org.apache.lucene.index.IndexableField; @@ -22,7 +23,7 @@ * This is a tree-writer that serializes a list of {@link ShapeField.DecodedTriangle} as an interval tree * into a byte array. */ -class TriangleTreeWriter { +public class TriangleTreeWriter { static final byte LEFT = 1; static final byte RIGHT = 1 << 1; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianCentroidCalculatorTests.java b/server/src/test/java/org/elasticsearch/lucene/spatial/CartesianCentroidCalculatorTests.java similarity index 88% rename from x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianCentroidCalculatorTests.java rename to server/src/test/java/org/elasticsearch/lucene/spatial/CartesianCentroidCalculatorTests.java index 4ce533515d20e..1ac9631d39f15 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianCentroidCalculatorTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/spatial/CartesianCentroidCalculatorTests.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Line; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeCoordinateEncoderTests.java b/server/src/test/java/org/elasticsearch/lucene/spatial/CartesianShapeCoordinateEncoderTests.java similarity index 90% rename from x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeCoordinateEncoderTests.java rename to server/src/test/java/org/elasticsearch/lucene/spatial/CartesianShapeCoordinateEncoderTests.java index 8bcd09bc85efd..672109942d7df 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeCoordinateEncoderTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/spatial/CartesianShapeCoordinateEncoderTests.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.geo.XYEncodingUtils; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeDocValuesQueryTests.java b/server/src/test/java/org/elasticsearch/lucene/spatial/CartesianShapeDocValuesQueryTests.java similarity index 96% rename from x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeDocValuesQueryTests.java rename to server/src/test/java/org/elasticsearch/lucene/spatial/CartesianShapeDocValuesQueryTests.java index f2148799d1b5f..4ce3d87d6420d 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/CartesianShapeDocValuesQueryTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/spatial/CartesianShapeDocValuesQueryTests.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
*/ -package org.elasticsearch.xpack.spatial.index.mapper; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.document.Document; import org.apache.lucene.document.ShapeField; @@ -30,7 +31,6 @@ import org.elasticsearch.geo.XShapeTestUtil; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CentroidCalculatorTests.java b/server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java similarity index 97% rename from x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CentroidCalculatorTests.java rename to server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java index c96a8dcfd1f93..d15ea1ac2e469 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/CentroidCalculatorTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/spatial/CentroidCalculatorTests.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.GeometryCollection; @@ -24,15 +25,14 @@ import org.hamcrest.BaseMatcher; import org.hamcrest.Description; import org.hamcrest.Matcher; -import org.locationtech.jts.io.ParseException; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import static org.elasticsearch.xpack.spatial.index.fielddata.DimensionalShapeType.LINE; -import static org.elasticsearch.xpack.spatial.index.fielddata.DimensionalShapeType.POINT; -import static org.elasticsearch.xpack.spatial.index.fielddata.DimensionalShapeType.POLYGON; +import static org.elasticsearch.lucene.spatial.DimensionalShapeType.LINE; +import static org.elasticsearch.lucene.spatial.DimensionalShapeType.POINT; +import static org.elasticsearch.lucene.spatial.DimensionalShapeType.POLYGON; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -194,7 +194,7 @@ public void testLineAsClosedPoint() { assertThat(calculator, matchesCentroid(new Point(x, y), 1.0)); } - public void testPolygonAsLine() throws ParseException { + public void testPolygonAsLine() { // create a line that traces itself as a polygon, and should therefore have zero area Line sourceLine = randomLine(); double[] x = new double[2 * sourceLine.length() - 1]; diff --git a/server/src/test/java/org/elasticsearch/lucene/spatial/GeoCentroidCalculatorTests.java b/server/src/test/java/org/elasticsearch/lucene/spatial/GeoCentroidCalculatorTests.java new file mode 100644 index 0000000000000..d1769a85b9cb0 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/lucene/spatial/GeoCentroidCalculatorTests.java @@ -0,0 +1,68 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.lucene.spatial; + +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geometry.Line; +import org.elasticsearch.geometry.MultiLine; +import org.elasticsearch.geometry.MultiPoint; +import org.elasticsearch.geometry.MultiPolygon; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.Polygon; +import org.elasticsearch.geometry.Rectangle; + +/** + * The GeoCentroidCalculatorTests were moved to server as part of the work to move the doc-values components needed by ESQL to server; + * in the process this test class was split in two: most tests moved to server, but one test remains in xpack.spatial because it depends on GeoShapeValues. + * See GeoCentroidCalculatorExtraTests.java for that test. + */ +public class GeoCentroidCalculatorTests extends CentroidCalculatorTests { + protected Point randomPoint() { + return GeometryTestUtils.randomPoint(false); + } + + protected MultiPoint randomMultiPoint() { + return GeometryTestUtils.randomMultiPoint(false); + } + + protected Line randomLine() { + return GeometryTestUtils.randomLine(false); + } + + protected MultiLine randomMultiLine() { + return GeometryTestUtils.randomMultiLine(false); + } + + protected Polygon randomPolygon() { + return GeometryTestUtils.randomPolygon(false); + } + + protected MultiPolygon randomMultiPolygon() { + return GeometryTestUtils.randomMultiPolygon(false); + } + + protected Rectangle randomRectangle() { + return GeometryTestUtils.randomRectangle(); + } + + protected double randomY() { + return GeometryTestUtils.randomLat(); + } + + protected double randomX() { + return GeometryTestUtils.randomLon(); + } + + @Override + protected boolean ignoreAreaErrors() { + // Tests that calculate polygon areas with very large double values can have very large errors for flat polygons + // This would not happen in the tightly bounded case of geo-data, but for cartesian test data it happens a lot. + return false; + } +} diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeCoordinateEncoderTests.java b/server/src/test/java/org/elasticsearch/lucene/spatial/GeoShapeCoordinateEncoderTests.java similarity index 91% rename from x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeCoordinateEncoderTests.java rename to server/src/test/java/org/elasticsearch/lucene/spatial/GeoShapeCoordinateEncoderTests.java index 130658307dd09..1bd66a95dee32 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeCoordinateEncoderTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/spatial/GeoShapeCoordinateEncoderTests.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
*/ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.geo.GeoEncodingUtils; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/LatLonShapeDocValuesQueryTests.java b/server/src/test/java/org/elasticsearch/lucene/spatial/LatLonShapeDocValuesQueryTests.java similarity index 96% rename from x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/LatLonShapeDocValuesQueryTests.java rename to server/src/test/java/org/elasticsearch/lucene/spatial/LatLonShapeDocValuesQueryTests.java index b3074c1e731aa..99fab30e3ade2 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/LatLonShapeDocValuesQueryTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/spatial/LatLonShapeDocValuesQueryTests.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. */ -package org.elasticsearch.xpack.spatial.index.mapper; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.document.Document; import org.apache.lucene.document.LatLonShape; @@ -32,7 +33,6 @@ import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.mapper.GeoShapeIndexer; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; import java.io.IOException; import java.util.List; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeTests.java b/server/src/test/java/org/elasticsearch/lucene/spatial/TriangleTreeTests.java similarity index 92% rename from x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeTests.java rename to server/src/test/java/org/elasticsearch/lucene/spatial/TriangleTreeTests.java index 435d0f314412d..ca75d0434a338 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/TriangleTreeTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/spatial/TriangleTreeTests.java @@ -1,11 +1,12 @@ /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
*/ -package org.elasticsearch.xpack.spatial.index.fielddata; +package org.elasticsearch.lucene.spatial; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeScriptDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeScriptDocValuesIT.java index aa7860aac1a40..1025b24a7bcf6 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeScriptDocValuesIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeScriptDocValuesIT.java @@ -22,6 +22,7 @@ import org.elasticsearch.geometry.utils.GeographyValidator; import org.elasticsearch.geometry.utils.WellKnownText; import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.lucene.spatial.DimensionalShapeType; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; @@ -31,7 +32,6 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; -import org.elasticsearch.xpack.spatial.index.fielddata.DimensionalShapeType; import org.elasticsearch.xpack.spatial.index.fielddata.GeoRelation; import org.elasticsearch.xpack.spatial.index.fielddata.GeoShapeValues; import org.elasticsearch.xpack.spatial.index.fielddata.LeafShapeFieldData; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValues.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValues.java index 1bd7296b2da39..f9481bb8169ea 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValues.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/CartesianShapeValues.java @@ -13,9 +13,11 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.geometry.utils.GeometryValidator; import org.elasticsearch.geometry.utils.StandardValidator; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.xpack.spatial.common.CartesianPoint; -import org.elasticsearch.xpack.spatial.index.mapper.CartesianShapeIndexer; import org.elasticsearch.xpack.spatial.search.aggregations.support.CartesianShapeValuesSourceType; import java.io.IOException; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Component2DRelationVisitor.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Component2DRelationVisitor.java index 6c9cb9aab786e..6e6437e939a60 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Component2DRelationVisitor.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Component2DRelationVisitor.java @@ -9,11 +9,13 @@ import org.apache.lucene.geo.Component2D; import org.apache.lucene.index.PointValues; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import 
org.elasticsearch.lucene.spatial.TriangleTreeWriter; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.TriangleTreeDecodedVisitor; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.abFromTriangle; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.bcFromTriangle; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.caFromTriangle; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.TriangleTreeDecodedVisitor; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.abFromTriangle; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.bcFromTriangle; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.caFromTriangle; /** * A reusable tree reader visitor for a previous serialized {@link org.elasticsearch.geometry.Geometry} using diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeScriptDocValues.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeScriptDocValues.java index 4840ca4034fd0..c9fceb61e071e 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeScriptDocValues.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeScriptDocValues.java @@ -11,9 +11,12 @@ import org.elasticsearch.common.geo.Orientation; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.lucene.spatial.BinaryShapeDocValuesField; +import org.elasticsearch.lucene.spatial.CentroidCalculator; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueWriter; import org.elasticsearch.script.GeometryFieldScript; import org.elasticsearch.search.aggregations.support.ValuesSourceType; -import org.elasticsearch.xpack.spatial.index.mapper.BinaryShapeDocValuesField; import org.elasticsearch.xpack.spatial.search.aggregations.support.GeoShapeValuesSourceType; import java.io.IOException; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java index fb32e9e1c4e4f..c5193d5412ec2 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoShapeValues.java @@ -16,6 +16,8 @@ import org.elasticsearch.geometry.utils.GeographyValidator; import org.elasticsearch.geometry.utils.GeometryValidator; import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.xpack.spatial.search.aggregations.support.GeoShapeValuesSourceType; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/LabelPositionVisitor.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/LabelPositionVisitor.java index 6007a9df569b1..5883beeb81cca 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/LabelPositionVisitor.java +++ 
b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/LabelPositionVisitor.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.spatial.index.fielddata; import org.elasticsearch.common.geo.SpatialPoint; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.TriangleTreeVisitor; import java.util.function.BiFunction; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/ShapeValues.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/ShapeValues.java index 2dcb2ff99848c..0c1bb45ffbd8d 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/ShapeValues.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/ShapeValues.java @@ -22,10 +22,16 @@ import org.elasticsearch.geometry.utils.GeometryValidator; import org.elasticsearch.geometry.utils.WellKnownText; import org.elasticsearch.index.mapper.ShapeIndexer; +import org.elasticsearch.lucene.spatial.BinaryShapeDocValuesField; +import org.elasticsearch.lucene.spatial.Component2DVisitor; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.DimensionalShapeType; +import org.elasticsearch.lucene.spatial.Extent; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.lucene.spatial.TriangleTreeVisitor; import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xpack.spatial.index.mapper.BinaryShapeDocValuesField; import java.io.IOException; import java.text.ParseException; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Tile2DVisitor.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Tile2DVisitor.java index 76a5edf6bb8ce..87a7a8a1c4e15 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Tile2DVisitor.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/fielddata/Tile2DVisitor.java @@ -7,10 +7,13 @@ package org.elasticsearch.xpack.spatial.index.fielddata; +import org.elasticsearch.lucene.spatial.TriangleTreeVisitor; +import org.elasticsearch.lucene.spatial.TriangleTreeWriter; + import static org.apache.lucene.geo.GeoUtils.orient; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.abFromTriangle; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.bcFromTriangle; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.caFromTriangle; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.abFromTriangle; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.bcFromTriangle; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.caFromTriangle; /** * A reusable tree reader visitor for a previous serialized {@link org.elasticsearch.geometry.Geometry} using diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java index 892e251285f14..71fb9b0f3126a 100644 --- 
a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java @@ -51,6 +51,9 @@ import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper; +import org.elasticsearch.lucene.spatial.BinaryShapeDocValuesField; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.LatLonShapeDocValuesQuery; import org.elasticsearch.script.GeometryFieldScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptCompiler; @@ -59,7 +62,6 @@ import org.elasticsearch.script.field.Field; import org.elasticsearch.search.lookup.FieldValues; import org.elasticsearch.search.lookup.SearchLookup; -import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; import org.elasticsearch.xpack.spatial.index.fielddata.GeoShapeValues; import org.elasticsearch.xpack.spatial.index.fielddata.plain.AbstractAtomicGeoShapeShapeFieldData; import org.elasticsearch.xpack.spatial.index.fielddata.plain.LatLonShapeIndexFieldData; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java index 21c4a1f97c3ef..0a1c0278d88d7 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java @@ -29,13 +29,15 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.lucene.spatial.BinaryShapeDocValuesField; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; import org.elasticsearch.script.field.AbstractScriptFieldFactory; import org.elasticsearch.script.field.DocValuesScriptFieldFactory; import org.elasticsearch.script.field.Field; import org.elasticsearch.xpack.spatial.common.CartesianBoundingBox; import org.elasticsearch.xpack.spatial.common.CartesianPoint; import org.elasticsearch.xpack.spatial.index.fielddata.CartesianShapeValues; -import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; import org.elasticsearch.xpack.spatial.index.fielddata.plain.AbstractAtomicCartesianShapeFieldData; import org.elasticsearch.xpack.spatial.index.fielddata.plain.CartesianShapeIndexFieldData; import org.elasticsearch.xpack.spatial.index.query.ShapeQueryProcessor; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java index 4bb9e988c0f90..cd09b74e99591 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java @@ -18,7 +18,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryShardException; import 
org.elasticsearch.index.query.SearchExecutionContext; -import org.elasticsearch.xpack.spatial.index.mapper.CartesianShapeDocValuesQuery; +import org.elasticsearch.lucene.spatial.CartesianShapeDocValuesQuery; import org.elasticsearch.xpack.spatial.index.mapper.ShapeFieldMapper; public class ShapeQueryProcessor { diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitor.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitor.java index 3bd991a87bdac..5deb653ffc125 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitor.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitor.java @@ -10,14 +10,14 @@ import org.apache.lucene.geo.Component2D; import org.apache.lucene.geo.GeoUtils; import org.apache.lucene.util.ArrayUtil; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; import org.elasticsearch.xpack.spatial.common.H3CartesianUtil; -import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; import org.elasticsearch.xpack.spatial.index.fielddata.GeoRelation; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.TriangleTreeDecodedVisitor; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.abFromTriangle; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.bcFromTriangle; -import static org.elasticsearch.xpack.spatial.index.fielddata.TriangleTreeVisitor.caFromTriangle; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.TriangleTreeDecodedVisitor; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.abFromTriangle; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.bcFromTriangle; +import static org.elasticsearch.lucene.spatial.TriangleTreeVisitor.caFromTriangle; /** * A reusable tree reader visitor for a previous serialized {@link org.elasticsearch.geometry.Geometry}. 
diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregator.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregator.java index d08464ffa6494..52e3413d96f5f 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregator.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregator.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasables; +import org.elasticsearch.lucene.spatial.DimensionalShapeType; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -23,7 +24,6 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.xpack.spatial.common.CartesianPoint; import org.elasticsearch.xpack.spatial.index.fielddata.CartesianShapeValues; -import org.elasticsearch.xpack.spatial.index.fielddata.DimensionalShapeType; import org.elasticsearch.xpack.spatial.search.aggregations.support.CartesianShapeValuesSource; import java.io.IOException; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregator.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregator.java index 0a2e073d66eda..416ae104dd11d 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregator.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregator.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.core.Releasables; +import org.elasticsearch.lucene.spatial.DimensionalShapeType; import org.elasticsearch.search.aggregations.AggregationExecutionContext; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -22,7 +23,6 @@ import org.elasticsearch.search.aggregations.metrics.MetricsAggregator; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; -import org.elasticsearch.xpack.spatial.index.fielddata.DimensionalShapeType; import org.elasticsearch.xpack.spatial.index.fielddata.GeoShapeValues; import org.elasticsearch.xpack.spatial.search.aggregations.support.GeoShapeValuesSource; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/runtime/GeoShapeScriptFieldGeoShapeQuery.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/runtime/GeoShapeScriptFieldGeoShapeQuery.java index ab591aaa3b209..f79f8b94dd5fa 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/runtime/GeoShapeScriptFieldGeoShapeQuery.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/runtime/GeoShapeScriptFieldGeoShapeQuery.java @@ -14,13 +14,13 @@ import org.elasticsearch.common.geo.ShapeRelation; 
import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.lucene.spatial.CentroidCalculator; +import org.elasticsearch.lucene.spatial.Component2DVisitor; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.lucene.spatial.GeometryDocValueWriter; import org.elasticsearch.script.GeometryFieldScript; import org.elasticsearch.script.Script; -import org.elasticsearch.xpack.spatial.index.fielddata.CentroidCalculator; -import org.elasticsearch.xpack.spatial.index.fielddata.Component2DVisitor; -import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; -import org.elasticsearch.xpack.spatial.index.fielddata.GeometryDocValueReader; -import org.elasticsearch.xpack.spatial.index.fielddata.GeometryDocValueWriter; import java.io.IOException; import java.io.UncheckedIOException; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoCentroidCalculatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoCentroidCalculatorExtraTests.java similarity index 61% rename from x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoCentroidCalculatorTests.java rename to x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoCentroidCalculatorExtraTests.java index 9c1be4678df0c..004e152175765 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoCentroidCalculatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeoCentroidCalculatorExtraTests.java @@ -10,12 +10,7 @@ import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Line; -import org.elasticsearch.geometry.MultiLine; -import org.elasticsearch.geometry.MultiPoint; -import org.elasticsearch.geometry.MultiPolygon; -import org.elasticsearch.geometry.Point; -import org.elasticsearch.geometry.Polygon; -import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.spatial.util.GeoTestUtils; import java.io.IOException; @@ -23,35 +18,11 @@ import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; -public class GeoCentroidCalculatorTests extends CentroidCalculatorTests { - protected Point randomPoint() { - return GeometryTestUtils.randomPoint(false); - } - - protected MultiPoint randomMultiPoint() { - return GeometryTestUtils.randomMultiPoint(false); - } - - protected Line randomLine() { - return GeometryTestUtils.randomLine(false); - } - - protected MultiLine randomMultiLine() { - return GeometryTestUtils.randomMultiLine(false); - } - - protected Polygon randomPolygon() { - return GeometryTestUtils.randomPolygon(false); - } - - protected MultiPolygon randomMultiPolygon() { - return GeometryTestUtils.randomMultiPolygon(false); - } - - protected Rectangle randomRectangle() { - return GeometryTestUtils.randomRectangle(); - } - +/** + * When the GeoCentroidCalculatorTests were moved to server as part of the work to move the doc-values components needed by ESQL to server, + * this test class was split in two: most tests moved to server, but one test remains in xpack.spatial because it depends on GeoShapeValues. 
+ */ +public class GeoCentroidCalculatorExtraTests extends ESTestCase { protected double randomY() { return GeometryTestUtils.randomLat(); } @@ -60,13 +31,6 @@ protected double randomX() { return GeometryTestUtils.randomLon(); } - @Override - protected boolean ignoreAreaErrors() { - // Tests that calculate polygon areas with very large double values can have very large errors for flat polygons - // This would not happen in the tightly bounded case of geo-data, but for cartesian test data it happens a lot. - return false; - } - public void testRoundingErrorAndNormalization() throws IOException { double lonA = randomX(); double latA = randomY(); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueTests.java index 50e505b58cb33..55988e72a2383 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/GeometryDocValueTests.java @@ -22,6 +22,10 @@ import org.elasticsearch.geometry.ShapeType; import org.elasticsearch.geometry.utils.StandardValidator; import org.elasticsearch.geometry.utils.WellKnownText; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.DimensionalShapeType; +import org.elasticsearch.lucene.spatial.Extent; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.spatial.util.GeoTestUtils; import org.hamcrest.BaseMatcher; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/LatLonGeometryRelationVisitorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/LatLonGeometryRelationVisitorTests.java index 18f33ec0d6845..e35c804b568a6 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/LatLonGeometryRelationVisitorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/LatLonGeometryRelationVisitorTests.java @@ -26,6 +26,9 @@ import org.elasticsearch.geometry.MultiPoint; import org.elasticsearch.geometry.MultiPolygon; import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.lucene.spatial.Component2DVisitor; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.spatial.util.GeoTestUtils; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/TestCoordinateEncoder.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/TestCoordinateEncoder.java deleted file mode 100644 index 37b23dfe5ea51..0000000000000 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/TestCoordinateEncoder.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.spatial.index.fielddata; - -/** - * {@link CoordinateEncoder} used for tests that is an identity-encoder-decoder - */ -public class TestCoordinateEncoder implements CoordinateEncoder { - - public static final TestCoordinateEncoder INSTANCE = new TestCoordinateEncoder(); - - @Override - public int encodeX(double x) { - return (int) x; - } - - @Override - public int encodeY(double y) { - return (int) y; - } - - @Override - public double decodeX(int x) { - return x; - } - - @Override - public double decodeY(int y) { - return y; - } - - @Override - public double normalizeX(double x) { - return x; - } - - @Override - public double normalizeY(double y) { - return y; - } -} diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/Tile2DVisitorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/Tile2DVisitorTests.java index 959b91ffbc910..b5a83b2cdd658 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/Tile2DVisitorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/fielddata/Tile2DVisitorTests.java @@ -22,6 +22,9 @@ import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.Polygon; import org.elasticsearch.geometry.Rectangle; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.Extent; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.spatial.util.GeoTestUtils; @@ -287,4 +290,40 @@ static void assertRelation(GeometryDocValueReader reader, Extent extent, GeoRela reader.visit(tile2DVisitor); assertThat(tile2DVisitor.relation(), in(expectedRelation)); } + + private static class TestCoordinateEncoder implements CoordinateEncoder { + + private static final TestCoordinateEncoder INSTANCE = new TestCoordinateEncoder(); + + @Override + public int encodeX(double x) { + return (int) x; + } + + @Override + public int encodeY(double y) { + return (int) y; + } + + @Override + public double decodeX(int x) { + return x; + } + + @Override + public double decodeY(int y) { + return y; + } + + @Override + public double normalizeX(double x) { + return x; + } + + @Override + public double normalizeY(double y) { + return y; + } + } + } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeScriptMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeScriptMapperTests.java index 0cea1b1054130..a125bbc2c07af 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeScriptMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeScriptMapperTests.java @@ -16,13 +16,13 @@ import org.elasticsearch.geometry.Point; import org.elasticsearch.index.mapper.MapperScriptTestCase; import org.elasticsearch.index.mapper.OnScriptError; +import org.elasticsearch.lucene.spatial.CentroidCalculator; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueWriter; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.GeometryFieldScript; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; -import 
org.elasticsearch.xpack.spatial.index.fielddata.CentroidCalculator; -import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; -import org.elasticsearch.xpack.spatial.index.fielddata.GeometryDocValueWriter; import java.io.IOException; import java.io.UncheckedIOException; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java index 1068e3f6505f8..20b1a906b1dab 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java @@ -32,12 +32,12 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.TestIngestDocument; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.spatial.index.mapper.CartesianShapeIndexer; import org.elasticsearch.xpack.spatial.index.mapper.GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType; import org.elasticsearch.xpack.spatial.index.mapper.ShapeFieldMapper.ShapeFieldType; import org.elasticsearch.xpack.spatial.index.query.ShapeQueryProcessor; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java index 3e0ec0fc9b769..8e2f713e6ed3e 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexVisitorTests.java @@ -22,10 +22,10 @@ import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.h3.H3; import org.elasticsearch.h3.LatLng; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; import org.elasticsearch.xpack.spatial.index.fielddata.GeoRelation; -import org.elasticsearch.xpack.spatial.index.fielddata.GeometryDocValueReader; import org.elasticsearch.xpack.spatial.util.GeoTestUtils; import java.io.IOException; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoShapeGeoGridTestCase.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoShapeGeoGridTestCase.java index 6e39f2affe352..834d04e49014c 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoShapeGeoGridTestCase.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoShapeGeoGridTestCase.java @@ -21,6 +21,7 @@ import org.elasticsearch.geometry.MultiPoint; import org.elasticsearch.geometry.Point; import org.elasticsearch.index.mapper.MappedFieldType; +import 
org.elasticsearch.lucene.spatial.BinaryShapeDocValuesField; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -33,7 +34,6 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; import org.elasticsearch.xpack.spatial.index.fielddata.GeoShapeValues; -import org.elasticsearch.xpack.spatial.index.mapper.BinaryShapeDocValuesField; import org.elasticsearch.xpack.spatial.index.mapper.GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType; import org.elasticsearch.xpack.spatial.search.aggregations.support.GeoShapeValuesSourceType; import org.elasticsearch.xpack.spatial.util.GeoTestUtils; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregatorTests.java index 8ade6d8e5695a..7d4ee29ffee0f 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianShapeCentroidAggregatorTests.java @@ -17,6 +17,8 @@ import org.elasticsearch.geo.ShapeTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.lucene.spatial.CentroidCalculator; +import org.elasticsearch.lucene.spatial.DimensionalShapeType; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -26,8 +28,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; import org.elasticsearch.xpack.spatial.common.CartesianPoint; -import org.elasticsearch.xpack.spatial.index.fielddata.CentroidCalculator; -import org.elasticsearch.xpack.spatial.index.fielddata.DimensionalShapeType; import org.elasticsearch.xpack.spatial.index.mapper.ShapeFieldMapper; import org.elasticsearch.xpack.spatial.search.aggregations.support.CartesianPointValuesSourceType; import org.elasticsearch.xpack.spatial.search.aggregations.support.CartesianShapeValuesSourceType; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregatorTests.java index a4673bf6af86c..86ce455d372e7 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeCentroidAggregatorTests.java @@ -20,6 +20,8 @@ import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.lucene.spatial.CentroidCalculator; +import org.elasticsearch.lucene.spatial.DimensionalShapeType; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import 
org.elasticsearch.search.aggregations.AggregatorTestCase; @@ -31,8 +33,6 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin; -import org.elasticsearch.xpack.spatial.index.fielddata.CentroidCalculator; -import org.elasticsearch.xpack.spatial.index.fielddata.DimensionalShapeType; import org.elasticsearch.xpack.spatial.index.mapper.GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType; import org.elasticsearch.xpack.spatial.search.aggregations.support.GeoShapeValuesSourceType; import org.elasticsearch.xpack.spatial.util.GeoTestUtils; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/GeoTestUtils.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/GeoTestUtils.java index 4e20e872ac446..ad5d5b5d89a37 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/GeoTestUtils.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/util/GeoTestUtils.java @@ -21,6 +21,12 @@ import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.Rectangle; import org.elasticsearch.index.mapper.GeoShapeIndexer; +import org.elasticsearch.lucene.spatial.BinaryShapeDocValuesField; +import org.elasticsearch.lucene.spatial.CartesianShapeIndexer; +import org.elasticsearch.lucene.spatial.CentroidCalculator; +import org.elasticsearch.lucene.spatial.CoordinateEncoder; +import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.lucene.spatial.GeometryDocValueWriter; import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; @@ -29,13 +35,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.spatial.index.fielddata.CartesianShapeValues; -import org.elasticsearch.xpack.spatial.index.fielddata.CentroidCalculator; -import org.elasticsearch.xpack.spatial.index.fielddata.CoordinateEncoder; import org.elasticsearch.xpack.spatial.index.fielddata.GeoShapeValues; -import org.elasticsearch.xpack.spatial.index.fielddata.GeometryDocValueReader; -import org.elasticsearch.xpack.spatial.index.fielddata.GeometryDocValueWriter; -import org.elasticsearch.xpack.spatial.index.mapper.BinaryShapeDocValuesField; -import org.elasticsearch.xpack.spatial.index.mapper.CartesianShapeIndexer; import java.io.IOException; From 64891011d3934938c0fa68f5c43509ffc47fd129 Mon Sep 17 00:00:00 2001 From: Niels Bauman <33722607+nielsbauman@users.noreply.github.com> Date: Wed, 7 Feb 2024 15:18:55 +0100 Subject: [PATCH 105/106] Extend `repository_integrity` health indicator for unknown and invalid repos (#104614) This PR extends the repository integrity health indicator to also cover unknown and invalid repositories. Because these errors are local to a node, we extend the `LocalHealthMonitor` to monitor the repositories and report changes in their health regarding unknown or invalid status. To simplify this extension in the future, we introduce the `HealthTracker` abstract class that can be used to create new local health checks. Furthermore, we change the severity of the health status when the repository integrity indicator reports unhealthy from `RED` to `YELLOW`, because even though this is a serious issue, there is no user impact yet. 
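To make the new extension point concrete before the diff itself, here is a minimal sketch of the kind of node-local health check the message above describes. The class and method names are illustrative assumptions for this write-up, not the actual contract of the `HealthTracker` class added under `server/src/main/java/org/elasticsearch/health/node/tracker/`:

    // Illustrative sketch only; the real HealthTracker contract in this PR may differ.
    // Each tracker computes a node-local health value on the monitor's schedule, and the
    // LocalHealthMonitor only reports to the health node when that value has changed.
    abstract class LocalHealthCheckSketch<T> {

        private volatile T lastReportedValue; // null until the first successful report

        // Compute the current node-local health value (e.g. disk usage, repository status).
        protected abstract T determineCurrentHealth();

        // Return the fresh value when it differs from the last reported one, else null.
        final T getChangedHealth() {
            T current = determineCurrentHealth();
            if (lastReportedValue == null || lastReportedValue.equals(current) == false) {
                return current;
            }
            return null;
        }

        // Remember what was successfully sent to the health node.
        final void markReported(T value) {
            this.lastReportedValue = value;
        }
    }

Keeping the last reported value per check is what lets the monitor skip redundant updates, which is the reason the message gives for factoring this logic out of `LocalHealthMonitor`.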
--- docs/changelog/104614.yaml | 6 + .../repeated-snapshot-failures.asciidoc | 4 +- docs/reference/troubleshooting.asciidoc | 2 +- .../snapshot/add-repository.asciidoc | 39 +- ...mLifecycleHealthIndicatorServiceTests.java | 11 +- ...sAvailabilityHealthIndicatorServiceIT.java | 2 +- ...toryIntegrityHealthIndicatorServiceIT.java | 4 +- server/src/main/java/module-info.java | 1 + .../org/elasticsearch/TransportVersions.java | 1 + .../health/node/DiskHealthInfo.java | 2 +- .../elasticsearch/health/node/HealthInfo.java | 19 +- .../health/node/HealthInfoCache.java | 13 +- .../health/node/LocalHealthMonitor.java | 243 ++++-------- .../health/node/RepositoriesHealthInfo.java | 32 ++ .../node/UpdateHealthInfoCacheAction.java | 77 +++- .../node/tracker/DiskHealthTracker.java | 136 +++++++ .../health/node/tracker/HealthTracker.java | 105 ++++++ .../tracker/RepositoriesHealthTracker.java | 58 +++ .../elasticsearch/node/NodeConstruction.java | 18 +- ...sitoryIntegrityHealthIndicatorService.java | 268 ++++++++++---- .../health/HealthServiceTests.java | 15 +- .../node/DiskHealthIndicatorServiceTests.java | 4 +- .../node/FetchHealthInfoCacheActionTests.java | 29 +- .../health/node/HealthInfoCacheTests.java | 20 +- .../health/node/HealthInfoTests.java | 98 +++-- .../health/node/LocalHealthMonitorTests.java | 350 +++--------------- .../UpdateHealthInfoCacheActionTests.java | 32 +- .../node/tracker/DiskHealthTrackerTests.java | 331 +++++++++++++++++ .../RepositoriesHealthTrackerTests.java | 112 ++++++ ...yIntegrityHealthIndicatorServiceTests.java | 332 +++++++++++++---- 30 files changed, 1609 insertions(+), 755 deletions(-) create mode 100644 docs/changelog/104614.yaml create mode 100644 server/src/main/java/org/elasticsearch/health/node/RepositoriesHealthInfo.java create mode 100644 server/src/main/java/org/elasticsearch/health/node/tracker/DiskHealthTracker.java create mode 100644 server/src/main/java/org/elasticsearch/health/node/tracker/HealthTracker.java create mode 100644 server/src/main/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTracker.java create mode 100644 server/src/test/java/org/elasticsearch/health/node/tracker/DiskHealthTrackerTests.java create mode 100644 server/src/test/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTrackerTests.java diff --git a/docs/changelog/104614.yaml b/docs/changelog/104614.yaml new file mode 100644 index 0000000000000..9b2c25a643825 --- /dev/null +++ b/docs/changelog/104614.yaml @@ -0,0 +1,6 @@ +pr: 104614 +summary: Extend `repository_integrity` health indicator for unknown and invalid repos +area: Health +type: enhancement +issues: + - 103784 diff --git a/docs/reference/tab-widgets/troubleshooting/snapshot/repeated-snapshot-failures.asciidoc b/docs/reference/tab-widgets/troubleshooting/snapshot/repeated-snapshot-failures.asciidoc index ceb282a3966f5..a3910675b1632 100644 --- a/docs/reference/tab-widgets/troubleshooting/snapshot/repeated-snapshot-failures.asciidoc +++ b/docs/reference/tab-widgets/troubleshooting/snapshot/repeated-snapshot-failures.asciidoc @@ -89,7 +89,7 @@ https://www.elastic.co/guide/en/cloud-enterprise/current/ece-manage-repositories if you are using such a deployment. One common failure scenario is repository corruption. This occurs most often when multiple instances of {es} write to -the same repository location. There is a <> to fix this problem. +the same repository location. There is a <> to fix this problem. 
In the event that snapshots are failing for other reasons, check the logs on the elected master node during the snapshot execution period for more information. @@ -163,7 +163,7 @@ Snapshots can fail for a variety of reasons. If the failures are due to configurati documentation for the repository that the automated snapshots are using. One common failure scenario is repository corruption. This occurs most often when multiple instances of {es} write to -the same repository location. There is a <> to fix this problem. +the same repository location. There is a <> to fix this problem. In the event that snapshots are failing for other reasons, check the logs on the elected master node during the snapshot execution period for more information. diff --git a/docs/reference/troubleshooting.asciidoc b/docs/reference/troubleshooting.asciidoc index de1f9e6c7a608..64df699d33638 100644 --- a/docs/reference/troubleshooting.asciidoc +++ b/docs/reference/troubleshooting.asciidoc @@ -43,7 +43,7 @@ fix problems that an {es} deployment might encounter. [[troubleshooting-snapshot]] === Snapshot and restore * <> -* <> +* <> * <> [discrete] diff --git a/docs/reference/troubleshooting/snapshot/add-repository.asciidoc b/docs/reference/troubleshooting/snapshot/add-repository.asciidoc index dc2ce5a4bc252..0de4667bd9688 100644 --- a/docs/reference/troubleshooting/snapshot/add-repository.asciidoc +++ b/docs/reference/troubleshooting/snapshot/add-repository.asciidoc @@ -1,8 +1,15 @@ [[add-repository]] -== Multiple deployments writing to the same snapshot repository +== Troubleshooting broken repositories -Multiple {es} deployments are writing to the same snapshot repository. {es} doesn't -support this configuration and only one cluster is allowed to write to the same +There are several situations where the <> might report an issue +regarding the integrity of snapshot repositories in the cluster. This page explains +the recommended actions for diagnosing corrupted, unknown, and invalid repositories. + +[[diagnosing-corrupted-repositories]] +=== Diagnosing corrupted repositories + +Multiple {es} deployments are writing to the same snapshot repository. {es} doesn't +support this configuration and only one cluster is allowed to write to the same repository. See <> for potential side-effects of corruption of the repository contents, which may not be resolved by the following guide. @@ -11,3 +18,29 @@ other deployments, and re-add (recreate) the repository in the current deploymen include::{es-repo-dir}/tab-widgets/troubleshooting/snapshot/corrupt-repository-widget.asciidoc[] + +[[diagnosing-unknown-repositories]] +=== Diagnosing unknown repositories + +When a snapshot repository is marked as "unknown", it means that an {es} node is +unable to instantiate the repository due to an unknown repository type. This is +usually caused by a missing plugin on the node. Make sure each node in the cluster +has the required plugins by following these steps: + +1. Retrieve the affected nodes from the affected resources section of the health report. +2. Use the <> to retrieve the plugins installed on each node. +3. Cross-reference this with a node that works correctly to find out which plugins are missing, +and install the missing plugins. + + +[[diagnosing-invalid-repositories]] +=== Diagnosing invalid repositories + +When an {es} node encounters an unexpected exception when trying to instantiate a snapshot +repository, it will mark the repository as "invalid" and write a warning to the log file.
+Use the following steps to diagnose the underlying cause of this issue: + +1. Retrieve the affected nodes from the affected resources section of the health report. +2. Refer to the logs of the affected node(s) and search for the repository name. +You should be able to find log entries that contain the relevant exception. +3. Try to resolve the errors reported. diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthIndicatorServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthIndicatorServiceTests.java index 877b463301311..4461e2ffb7f02 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthIndicatorServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/health/DataStreamLifecycleHealthIndicatorServiceTests.java @@ -41,7 +41,7 @@ public void setupService() { } public void testGreenWhenNoDSLHealthData() { - HealthIndicatorResult result = service.calculate(true, new HealthInfo(Map.of(), null)); + HealthIndicatorResult result = service.calculate(true, constructHealthInfo(null)); assertThat(result.status(), is(HealthStatus.GREEN)); assertThat( result.symptom(), @@ -53,7 +53,7 @@ public void testGreenWhenNoDSLHealthData() { } public void testGreenWhenEmptyListOfStagnatingIndices() { - HealthIndicatorResult result = service.calculate(true, new HealthInfo(Map.of(), new DataStreamLifecycleHealthInfo(List.of(), 15))); + HealthIndicatorResult result = service.calculate(true, constructHealthInfo(new DataStreamLifecycleHealthInfo(List.of(), 15))); assertThat(result.status(), is(HealthStatus.GREEN)); assertThat(result.symptom(), is("Data streams are executing their lifecycles without issues")); assertThat(result.details(), is(not(HealthIndicatorDetails.EMPTY))); @@ -67,8 +67,7 @@ public void testYellowWhenStagnatingIndicesPresent() { String firstGenerationIndex = DataStream.getDefaultBackingIndexName("foo", 1L); HealthIndicatorResult result = service.calculate( true, - new HealthInfo( - Map.of(), + constructHealthInfo( new DataStreamLifecycleHealthInfo( List.of(new DslErrorInfo(secondGenerationIndex, 1L, 200), new DslErrorInfo(firstGenerationIndex, 3L, 100)), 15 @@ -99,4 +98,8 @@ public void testYellowWhenStagnatingIndicesPresent() { assertThat(diagnosis.definition(), is(STAGNATING_BACKING_INDICES_DIAGNOSIS_DEF)); assertThat(diagnosis.affectedResources().get(0).getValues(), containsInAnyOrder(secondGenerationIndex, firstGenerationIndex)); } + + private HealthInfo constructHealthInfo(DataStreamLifecycleHealthInfo dslHealthInfo) { + return new HealthInfo(Map.of(), dslHealthInfo, Map.of()); + } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceIT.java index 92194a94ab44c..8afcaccaf9e77 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/shards/ShardsAvailabilityHealthIndicatorServiceIT.java @@ -135,7 +135,7 @@ public void clusterChanged(ClusterChangedEvent event) { states.add( new RoutingNodesAndHealth( event.state().getRoutingNodes(), - service.calculate(false,
1, new HealthInfo(Map.of(), DataStreamLifecycleHealthInfo.NO_DSL_ERRORS)) + service.calculate(false, 1, new HealthInfo(Map.of(), DataStreamLifecycleHealthInfo.NO_DSL_ERRORS, Map.of())) ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceIT.java index 80b9c437a5ddb..1a54df1f85ed6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceIT.java @@ -21,7 +21,7 @@ import java.nio.file.Path; import static org.elasticsearch.health.HealthStatus.GREEN; -import static org.elasticsearch.health.HealthStatus.RED; +import static org.elasticsearch.health.HealthStatus.YELLOW; import static org.elasticsearch.snapshots.RepositoryIntegrityHealthIndicatorService.NAME; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -61,7 +61,7 @@ public void testRepositoryIntegrityHealthIndicator() throws IOException, Interru containsString("[" + repository + "] The repository has been disabled to prevent data corruption") ); - assertSnapshotRepositoryHealth("Indicator should be red after file is deleted from the repository", client, RED); + assertSnapshotRepositoryHealth("Indicator should be yellow after file is deleted from the repository", client, YELLOW); deleteRepository(repository); } diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index f099852a8f428..0fb2e522db167 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -228,6 +228,7 @@ exports org.elasticsearch.gateway; exports org.elasticsearch.health; exports org.elasticsearch.health.node; + exports org.elasticsearch.health.node.tracker; exports org.elasticsearch.health.node.selection; exports org.elasticsearch.health.stats; exports org.elasticsearch.http; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 7d7f7b66356d0..27091e4ae3d32 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -172,6 +172,7 @@ static TransportVersion def(int id) { public static final TransportVersion NLP_DOCUMENT_CHUNKING_ADDED = def(8_585_00_0); public static final TransportVersion SEARCH_TIMEOUT_EXCEPTION_ADDED = def(8_586_00_0); public static final TransportVersion ML_TEXT_EMBEDDING_INFERENCE_SERVICE_ADDED = def(8_587_00_0); + public static final TransportVersion HEALTH_INFO_ENRICHED_WITH_REPOS = def(8_588_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/health/node/DiskHealthInfo.java b/server/src/main/java/org/elasticsearch/health/node/DiskHealthInfo.java index fdda0a260068c..f1e085482b72a 100644 --- a/server/src/main/java/org/elasticsearch/health/node/DiskHealthInfo.java +++ b/server/src/main/java/org/elasticsearch/health/node/DiskHealthInfo.java @@ -20,7 +20,7 @@ * The health status of the disk space of this node along with the cause. 
*/ public record DiskHealthInfo(HealthStatus healthStatus, @Nullable Cause cause) implements Writeable { - DiskHealthInfo(HealthStatus healthStatus) { + public DiskHealthInfo(HealthStatus healthStatus) { this(healthStatus, null); } diff --git a/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java b/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java index 0bb8027f8299d..97087c05e8de8 100644 --- a/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java +++ b/server/src/main/java/org/elasticsearch/health/node/HealthInfo.java @@ -23,19 +23,25 @@ * This class wraps all the data returned by the health node. * @param diskInfoByNode A Map of node id to DiskHealthInfo for that node * @param dslHealthInfo The data stream lifecycle health information + * @param repositoriesInfoByNode A Map of node id to RepositoriesHealthInfo for that node */ -public record HealthInfo(Map diskInfoByNode, @Nullable DataStreamLifecycleHealthInfo dslHealthInfo) - implements - Writeable { +public record HealthInfo( + Map diskInfoByNode, + @Nullable DataStreamLifecycleHealthInfo dslHealthInfo, + Map repositoriesInfoByNode +) implements Writeable { - public static final HealthInfo EMPTY_HEALTH_INFO = new HealthInfo(Map.of(), NO_DSL_ERRORS); + public static final HealthInfo EMPTY_HEALTH_INFO = new HealthInfo(Map.of(), NO_DSL_ERRORS, Map.of()); public HealthInfo(StreamInput input) throws IOException { this( input.readMap(DiskHealthInfo::new), input.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_DSL_STATUS) ? input.readOptionalWriteable(DataStreamLifecycleHealthInfo::new) - : null + : null, + input.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS) + ? input.readMap(RepositoriesHealthInfo::new) + : Map.of() ); } @@ -45,5 +51,8 @@ public void writeTo(StreamOutput output) throws IOException { if (output.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_DSL_STATUS)) { output.writeOptionalWriteable(dslHealthInfo); } + if (output.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS)) { + output.writeMap(repositoriesInfoByNode, StreamOutput::writeWriteable); + } } } diff --git a/server/src/main/java/org/elasticsearch/health/node/HealthInfoCache.java b/server/src/main/java/org/elasticsearch/health/node/HealthInfoCache.java index 986b5e13dce6e..58ac3b03dd964 100644 --- a/server/src/main/java/org/elasticsearch/health/node/HealthInfoCache.java +++ b/server/src/main/java/org/elasticsearch/health/node/HealthInfoCache.java @@ -29,6 +29,7 @@ public class HealthInfoCache implements ClusterStateListener { private volatile ConcurrentHashMap diskInfoByNode = new ConcurrentHashMap<>(); @Nullable private volatile DataStreamLifecycleHealthInfo dslHealthInfo = null; + private volatile ConcurrentHashMap repositoriesInfoByNode = new ConcurrentHashMap<>(); private HealthInfoCache() {} @@ -41,7 +42,8 @@ public static HealthInfoCache create(ClusterService clusterService) { public void updateNodeHealth( String nodeId, @Nullable DiskHealthInfo diskHealthInfo, - @Nullable DataStreamLifecycleHealthInfo latestDslHealthInfo + @Nullable DataStreamLifecycleHealthInfo latestDslHealthInfo, + @Nullable RepositoriesHealthInfo repositoriesHealthInfo ) { if (diskHealthInfo != null) { diskInfoByNode.put(nodeId, diskHealthInfo); @@ -49,6 +51,9 @@ public void updateNodeHealth( if (latestDslHealthInfo != null) { dslHealthInfo = latestDslHealthInfo; } + if (repositoriesHealthInfo != null) { + 
repositoriesInfoByNode.put(nodeId, repositoriesHealthInfo); + } } @Override @@ -59,16 +64,18 @@ public void clusterChanged(ClusterChangedEvent event) { if (event.nodesRemoved()) { for (DiscoveryNode removedNode : event.nodesDelta().removedNodes()) { diskInfoByNode.remove(removedNode.getId()); + repositoriesInfoByNode.remove(removedNode.getId()); } } // Resetting the cache is not synchronized for efficiency and simplicity. // Processing a delayed update after the cache has been emptied because // the node is not the health node anymore has small impact since it will // be reset in the next round again. - } else if (diskInfoByNode.isEmpty() == false) { + } else if (diskInfoByNode.isEmpty() == false || dslHealthInfo != null || repositoriesInfoByNode.isEmpty() == false) { logger.debug("Node [{}][{}] is no longer the health node, emptying the cache.", localNode.getName(), localNode.getId()); diskInfoByNode = new ConcurrentHashMap<>(); dslHealthInfo = null; + repositoriesInfoByNode = new ConcurrentHashMap<>(); } } @@ -78,6 +85,6 @@ public void clusterChanged(ClusterChangedEvent event) { */ public HealthInfo getHealthInfo() { // A shallow copy is enough because the inner data is immutable. - return new HealthInfo(Map.copyOf(diskInfoByNode), dslHealthInfo); + return new HealthInfo(Map.copyOf(diskInfoByNode), dslHealthInfo, Map.copyOf(repositoriesInfoByNode)); } } diff --git a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java index 94cd518051199..d5d336b88b8ad 100644 --- a/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java +++ b/server/src/main/java/org/elasticsearch/health/node/LocalHealthMonitor.java @@ -11,51 +11,40 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; -import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.DiskUsage; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodeRole; -import org.elasticsearch.cluster.routing.RoutingNode; -import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.RunOnce; -import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.HealthFeatures; -import org.elasticsearch.health.HealthStatus; import org.elasticsearch.health.metadata.HealthMetadata; import org.elasticsearch.health.node.action.HealthNodeNotDiscoveredException; import org.elasticsearch.health.node.selection.HealthNode; import org.elasticsearch.health.node.selection.HealthNodeTaskExecutor; -import org.elasticsearch.node.NodeService; +import 
org.elasticsearch.health.node.tracker.HealthTracker; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.NodeNotConnectedException; +import java.util.List; import java.util.Objects; -import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.core.Strings.format; /** - * This class monitors the health of the node regarding the load on several resources. - * Currently, it only checks for available disk space. Furthermore, it informs the health - * node about the local health upon change or when a new node is detected or when the - * master node changed. + * This class monitors the local health of the node, such as the load and any errors that can be specific to a node + * (as opposed to errors that are cluster-wide). It informs the health node about the local health upon change, + * when a new node is detected, or when the master node changes. */ public class LocalHealthMonitor implements ClusterStateListener { @@ -71,7 +60,6 @@ public class LocalHealthMonitor implements ClusterStateListener { private final ClusterService clusterService; private final ThreadPool threadPool; - private final DiskCheck diskCheck; private final Client client; private final FeatureService featureService; @@ -81,10 +69,10 @@ public class LocalHealthMonitor implements ClusterStateListener { // Signals that all the prerequisites have been fulfilled and the monitoring can be started. private volatile boolean prerequisitesFulfilled; - // Keeps the latest health state that was successfully reported to the current health node. - private final AtomicReference lastReportedDiskHealthInfo = new AtomicReference<>(); + // List of health trackers to be executed in each monitoring cycle. + private final List> healthTrackers; // Keeps the last seen health node. We use this variable to ensure that there wasn't a health node - // change between the time we send an update until the time we update the lastReportedDiskHealthInfo. + // change between the time we send an update and the time we record the last health state that was successfully reported. private final AtomicReference lastSeenHealthNode = new AtomicReference<>(); // Using a volatile reference to ensure that there is a single instance of monitoring running at all times. // No need for extra synchronization because all the writes are executed on the cluster applier thread.
@@ -93,35 +81,35 @@ public class LocalHealthMonitor implements ClusterStateListener { private LocalHealthMonitor( Settings settings, ClusterService clusterService, - NodeService nodeService, ThreadPool threadPool, Client client, - FeatureService featureService + FeatureService featureService, + List> healthTrackers ) { this.threadPool = threadPool; this.monitorInterval = POLL_INTERVAL_SETTING.get(settings); this.enabled = HealthNodeTaskExecutor.ENABLED_SETTING.get(settings); this.clusterService = clusterService; this.client = client; - this.diskCheck = new DiskCheck(nodeService); this.featureService = featureService; + this.healthTrackers = healthTrackers; } public static LocalHealthMonitor create( Settings settings, ClusterService clusterService, - NodeService nodeService, ThreadPool threadPool, Client client, - FeatureService featureService + FeatureService featureService, + List> healthTrackers ) { LocalHealthMonitor localHealthMonitor = new LocalHealthMonitor( settings, clusterService, - nodeService, threadPool, client, - featureService + featureService, + healthTrackers ); localHealthMonitor.registerListeners(); return localHealthMonitor; @@ -164,15 +152,7 @@ private void stopMonitoring() { private void startMonitoringIfNecessary() { if (prerequisitesFulfilled && enabled) { if (isMonitorRunning() == false) { - monitoring = Monitoring.start( - monitorInterval, - threadPool, - lastReportedDiskHealthInfo, - lastSeenHealthNode, - diskCheck, - clusterService, - client - ); + monitoring = Monitoring.start(monitorInterval, threadPool, lastSeenHealthNode, healthTrackers, clusterService, client); logger.debug("Local health monitoring started {}", monitoring); } else { logger.trace("Local health monitoring already started {}, skipping", monitoring); @@ -195,7 +175,8 @@ public void clusterChanged(ClusterChangedEvent event) { // On health node or on master node changes, the health node might be reset so the reported // health info gets reset to null, to ensure it will be resent. lastSeenHealthNode.set(currentHealthNode == null ? null : currentHealthNode.getId()); - lastReportedDiskHealthInfo.set(null); + // Reset the reference of each HealthTracker. + healthTrackers.forEach(HealthTracker::reset); if (logger.isDebugEnabled()) { String reason; if (healthNodeChanged && masterNodeChanged) { @@ -242,11 +223,6 @@ private boolean hasHealthNodeChanged(DiscoveryNode currentHealthNode, ClusterCha || Objects.equals(previousHealthNode, currentHealthNode) == false; } - @Nullable - DiskHealthInfo getLastReportedDiskHealthInfo() { - return lastReportedDiskHealthInfo.get(); - } - /** * This class is responsible for running the health monitoring. It evaluates and checks the health info of this node * in the configured intervals. The first run happens upon initialization. 
If there is an exception, it will log it @@ -258,11 +234,10 @@ static class Monitoring implements Runnable, Scheduler.Cancellable { private final Executor executor; private final Scheduler scheduler; private final ClusterService clusterService; - private final DiskCheck diskCheck; private final Client client; - private final AtomicReference lastReportedDiskHealthInfo; private final AtomicReference lastSeenHealthNode; + private final List> healthTrackers; private volatile boolean cancelled = false; private volatile Scheduler.ScheduledCancellable scheduledRun; @@ -271,19 +246,17 @@ private Monitoring( TimeValue interval, Scheduler scheduler, Executor executor, - AtomicReference lastReportedDiskHealthInfo, AtomicReference lastSeenHealthNode, - DiskCheck diskCheck, + List> healthTrackers, ClusterService clusterService, Client client ) { this.interval = interval; this.executor = executor; this.scheduler = scheduler; - this.lastReportedDiskHealthInfo = lastReportedDiskHealthInfo; this.lastSeenHealthNode = lastSeenHealthNode; this.clusterService = clusterService; - this.diskCheck = diskCheck; + this.healthTrackers = healthTrackers; this.client = client; } @@ -293,9 +266,8 @@ private Monitoring( static Monitoring start( TimeValue interval, ThreadPool threadPool, - AtomicReference lastReportedDiskHealthInfo, AtomicReference lastSeenHealthNode, - DiskCheck diskCheck, + List> healthTrackers, ClusterService clusterService, Client client ) { @@ -303,9 +275,8 @@ static Monitoring start( interval, threadPool, threadPool.executor(ThreadPool.Names.MANAGEMENT), - lastReportedDiskHealthInfo, lastSeenHealthNode, - diskCheck, + healthTrackers, clusterService, client ); @@ -350,43 +321,31 @@ public void run() { boolean nextRunScheduled = false; Runnable scheduleNextRun = new RunOnce(this::scheduleNextRunIfNecessary); try { - ClusterState clusterState = clusterService.state(); - HealthMetadata healthMetadata = HealthMetadata.getFromClusterState(clusterState); - if (healthMetadata != null) { - DiskHealthInfo previousHealth = this.lastReportedDiskHealthInfo.get(); - DiskHealthInfo currentHealth = diskCheck.getHealth(healthMetadata, clusterState); - if (currentHealth.equals(previousHealth) == false) { - String nodeId = clusterService.localNode().getId(); - String healthNodeId = lastSeenHealthNode.get(); - ActionListener listener = ActionListener.wrap(response -> { - // Update the last reported value only if the health node hasn't changed. - if (Objects.equals(healthNodeId, lastSeenHealthNode.get()) - && lastReportedDiskHealthInfo.compareAndSet(previousHealth, currentHealth)) { - logger.debug( - "Health info [{}] successfully sent, last reported value: {}.", - currentHealth, - lastReportedDiskHealthInfo.get() - ); - } - }, e -> { - if (e.getCause() instanceof NodeNotConnectedException - || e.getCause() instanceof HealthNodeNotDiscoveredException) { - logger.debug("Failed to connect to the health node [{}], will try again.", e.getCause().getMessage()); - } else { - logger.debug( - () -> format("Failed to send health info [%s] to health node, will try again.", currentHealth), - e - ); - } - }); - client.execute( - UpdateHealthInfoCacheAction.INSTANCE, - new UpdateHealthInfoCacheAction.Request(nodeId, currentHealth), - ActionListener.runAfter(listener, scheduleNextRun) - ); - nextRunScheduled = true; - } + List> healthProgresses = getHealthProgresses(); + if (healthProgresses.isEmpty()) { + // Next run will still be scheduled in the `finally` block. 
+ return; } + // Create builder and add the current value of each (changed) health tracker to the request. + var builder = new UpdateHealthInfoCacheAction.Request.Builder().nodeId(clusterService.localNode().getId()); + healthProgresses.forEach(changedHealthInfo -> changedHealthInfo.updateRequestBuilder(builder)); + + var healthNodeId = lastSeenHealthNode.get(); + var listener = ActionListener.wrap(response -> { + // Don't update the latest health info if the health node has changed while this request was being processed. + if (Objects.equals(healthNodeId, lastSeenHealthNode.get()) == false) { + return; + } + healthProgresses.forEach(HealthTracker.HealthProgress::recordProgressIfRelevant); + }, e -> { + if (e.getCause() instanceof NodeNotConnectedException || e.getCause() instanceof HealthNodeNotDiscoveredException) { + logger.debug("Failed to connect to the health node [{}], will try again.", e.getCause().getMessage()); + } else { + logger.debug(() -> format("Failed to send health info to health node, will try again."), e); + } + }); + client.execute(UpdateHealthInfoCacheAction.INSTANCE, builder.build(), ActionListener.runAfter(listener, scheduleNextRun)); + nextRunScheduled = true; } catch (Exception e) { logger.warn(() -> format("Failed to run scheduled health monitoring on thread pool [%s]", executor), e); } finally { @@ -397,6 +356,24 @@ public void run() { } } + /** + * Retrieve the current health of each tracker and return a list of the ones that have changed. + * + * @return a list of changed health infos. + */ + private List> getHealthProgresses() { + var healthMetadata = HealthMetadata.getFromClusterState(clusterService.state()); + // Don't try to run the health trackers if the HealthMetadata is not available. + if (healthMetadata == null) { + return List.of(); + } + + return healthTrackers.stream().>map(HealthTracker::trackHealth) + // Only return changed values. + .filter(HealthTracker.HealthProgress::hasChanged) + .toList(); + } + private void scheduleNextRunIfNecessary() { if (cancelled) { return; @@ -413,94 +390,4 @@ public String toString() { return "Monitoring{interval=" + interval + ", cancelled=" + cancelled + "}"; } } - - /** - * Determines the disk health of this node by checking if it exceeds the thresholds defined in the health metadata.
- */ - static class DiskCheck { - private final NodeService nodeService; - - DiskCheck(NodeService nodeService) { - this.nodeService = nodeService; - } - - DiskHealthInfo getHealth(HealthMetadata healthMetadata, ClusterState clusterState) { - DiscoveryNode node = clusterState.getNodes().getLocalNode(); - HealthMetadata.Disk diskMetadata = healthMetadata.getDiskMetadata(); - DiskUsage usage = getDiskUsage(); - if (usage == null) { - return new DiskHealthInfo(HealthStatus.UNKNOWN, DiskHealthInfo.Cause.NODE_HAS_NO_DISK_STATS); - } - - ByteSizeValue totalBytes = ByteSizeValue.ofBytes(usage.totalBytes()); - - if (node.isDedicatedFrozenNode() || isDedicatedSearchNode(node)) { - long frozenFloodStageThreshold = diskMetadata.getFreeBytesFrozenFloodStageWatermark(totalBytes).getBytes(); - if (usage.freeBytes() < frozenFloodStageThreshold) { - logger.debug("Flood stage disk watermark [{}] exceeded on {}", frozenFloodStageThreshold, usage); - return new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.FROZEN_NODE_OVER_FLOOD_STAGE_THRESHOLD); - } - return new DiskHealthInfo(HealthStatus.GREEN); - } - long floodStageThreshold = diskMetadata.getFreeBytesFloodStageWatermark(totalBytes).getBytes(); - if (usage.freeBytes() < floodStageThreshold) { - logger.debug("Flood stage disk watermark [{}] exceeded on {}", floodStageThreshold, usage); - return new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.NODE_OVER_THE_FLOOD_STAGE_THRESHOLD); - } - - long highThreshold = diskMetadata.getFreeBytesHighWatermark(totalBytes).getBytes(); - if (usage.freeBytes() < highThreshold) { - if (node.canContainData()) { - // for data nodes only report YELLOW if shards can't move away from the node - if (DiskCheck.hasRelocatingShards(clusterState, node) == false) { - logger.debug("High disk watermark [{}] exceeded on {}", highThreshold, usage); - return new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD); - } - } else { - // for non-data nodes report YELLOW when the disk high watermark is breached - logger.debug("High disk watermark [{}] exceeded on {}", highThreshold, usage); - return new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD); - } - } - return new DiskHealthInfo(HealthStatus.GREEN); - } - - private static boolean isDedicatedSearchNode(DiscoveryNode node) { - Set roles = node.getRoles(); - return roles.contains(DiscoveryNodeRole.SEARCH_ROLE) - && roles.stream().filter(DiscoveryNodeRole::canContainData).anyMatch(r -> r != DiscoveryNodeRole.SEARCH_ROLE) == false; - } - - private DiskUsage getDiskUsage() { - NodeStats nodeStats = nodeService.stats( - CommonStatsFlags.NONE, - false, - false, - false, - false, - false, - true, - false, - false, - false, - false, - false, - false, - false, - false, - false, - false - ); - return DiskUsage.findLeastAvailablePath(nodeStats); - } - - static boolean hasRelocatingShards(ClusterState clusterState, DiscoveryNode node) { - RoutingNode routingNode = clusterState.getRoutingNodes().node(node.getId()); - if (routingNode == null) { - // routing node will be null for non-data nodes - return false; - } - return routingNode.numberOfShardsWithState(ShardRoutingState.RELOCATING) > 0; - } - } } diff --git a/server/src/main/java/org/elasticsearch/health/node/RepositoriesHealthInfo.java b/server/src/main/java/org/elasticsearch/health/node/RepositoriesHealthInfo.java new file mode 100644 index 0000000000000..ffbad943a783a --- /dev/null +++ 
b/server/src/main/java/org/elasticsearch/health/node/RepositoriesHealthInfo.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.health.node; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; +import java.util.List; + +/** + * Health info regarding repository health for a node. It refers to issues that are local to a node such as the unknown and + * invalid repositories. + */ +public record RepositoriesHealthInfo(List unknownRepositories, List invalidRepositories) implements Writeable { + public RepositoriesHealthInfo(StreamInput in) throws IOException { + this(in.readStringCollectionAsList(), in.readStringCollectionAsList()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeStringCollection(unknownRepositories); + out.writeStringCollection(invalidRepositories); + } +} diff --git a/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java b/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java index 9567331c678b5..bbbe84630f20a 100644 --- a/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java +++ b/server/src/main/java/org/elasticsearch/health/node/UpdateHealthInfoCacheAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.Locale; import java.util.Objects; /** @@ -42,16 +43,20 @@ public static class Request extends HealthNodeRequest { private final DiskHealthInfo diskHealthInfo; @Nullable private final DataStreamLifecycleHealthInfo dslHealthInfo; + @Nullable + private final RepositoriesHealthInfo repositoriesHealthInfo; - public Request(String nodeId, DiskHealthInfo diskHealthInfo) { + public Request(String nodeId, DiskHealthInfo diskHealthInfo, RepositoriesHealthInfo repositoriesHealthInfo) { this.nodeId = nodeId; this.diskHealthInfo = diskHealthInfo; + this.repositoriesHealthInfo = repositoriesHealthInfo; this.dslHealthInfo = null; } public Request(String nodeId, DataStreamLifecycleHealthInfo dslHealthInfo) { this.nodeId = nodeId; this.diskHealthInfo = null; + this.repositoriesHealthInfo = null; this.dslHealthInfo = dslHealthInfo; } @@ -61,6 +66,9 @@ public Request(StreamInput in) throws IOException { if (in.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_DSL_STATUS)) { this.diskHealthInfo = in.readOptionalWriteable(DiskHealthInfo::new); this.dslHealthInfo = in.readOptionalWriteable(DataStreamLifecycleHealthInfo::new); + this.repositoriesHealthInfo = in.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS) + ? in.readOptionalWriteable(RepositoriesHealthInfo::new) + : null; } else { // BWC for pre-8.12 the disk health info was mandatory. 
Evolving this request has proven tricky however we've made use of // waiting for all nodes to be on the {@link TransportVersions.HEALTH_INFO_ENRICHED_WITH_DSL_STATUS} transport version @@ -68,6 +76,7 @@ public Request(StreamInput in) throws IOException { // transport invariant of always having a disk health information in the request this.diskHealthInfo = new DiskHealthInfo(in); this.dslHealthInfo = null; + this.repositoriesHealthInfo = null; } } @@ -83,6 +92,10 @@ public DataStreamLifecycleHealthInfo getDslHealthInfo() { return dslHealthInfo; } + public RepositoriesHealthInfo getRepositoriesHealthInfo() { + return repositoriesHealthInfo; + } + @Override public ActionRequestValidationException validate() { return null; @@ -95,6 +108,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_DSL_STATUS)) { out.writeOptionalWriteable(diskHealthInfo); out.writeOptionalWriteable(dslHealthInfo); + if (out.getTransportVersion().onOrAfter(TransportVersions.HEALTH_INFO_ENRICHED_WITH_REPOS)) { + out.writeOptionalWriteable(repositoriesHealthInfo); + } } else { // BWC for pre-8.12 the disk health info was mandatory. Evolving this request has proven tricky however we've made use of // waiting for all nodes to be on the {@link TransportVersions.HEALTH_INFO_ENRICHED_WITH_DSL_STATUS} transport version @@ -106,14 +122,14 @@ public void writeTo(StreamOutput out) throws IOException { @Override public String getDescription() { - return "Update health info cache for node [" - + nodeId - + "] with disk health info [" - + diskHealthInfo - + "] and DSL health info" - + " [" - + dslHealthInfo - + "]."; + return String.format( + Locale.ROOT, + "Update health info cache for node [%s] with disk health info [%s], DSL health info [%s], repositories health info [%s].", + nodeId, + diskHealthInfo, + dslHealthInfo, + repositoriesHealthInfo + ); } @Override @@ -127,12 +143,44 @@ public boolean equals(Object o) { Request request = (Request) o; return Objects.equals(nodeId, request.nodeId) && Objects.equals(diskHealthInfo, request.diskHealthInfo) - && Objects.equals(dslHealthInfo, request.dslHealthInfo); + && Objects.equals(dslHealthInfo, request.dslHealthInfo) + && Objects.equals(repositoriesHealthInfo, request.repositoriesHealthInfo); } @Override public int hashCode() { - return Objects.hash(nodeId, diskHealthInfo, dslHealthInfo); + return Objects.hash(nodeId, diskHealthInfo, dslHealthInfo, repositoriesHealthInfo); + } + + public static class Builder { + private String nodeId; + private DiskHealthInfo diskHealthInfo; + private RepositoriesHealthInfo repositoriesHealthInfo; + private DataStreamLifecycleHealthInfo dslHealthInfo; + + public Builder nodeId(String nodeId) { + this.nodeId = nodeId; + return this; + } + + public Builder diskHealthInfo(DiskHealthInfo diskHealthInfo) { + this.diskHealthInfo = diskHealthInfo; + return this; + } + + public Builder repositoriesHealthInfo(RepositoriesHealthInfo repositoriesHealthInfo) { + this.repositoriesHealthInfo = repositoriesHealthInfo; + return this; + } + + public Builder dslHealthInfo(DataStreamLifecycleHealthInfo dslHealthInfo) { + this.dslHealthInfo = dslHealthInfo; + return this; + } + + public Request build() { + return new Request(nodeId, diskHealthInfo, repositoriesHealthInfo); + } } } @@ -174,7 +222,12 @@ protected void healthOperation( ClusterState clusterState, ActionListener listener ) { - nodeHealthOverview.updateNodeHealth(request.getNodeId(), request.getDiskHealthInfo(), 
request.getDslHealthInfo()); + nodeHealthOverview.updateNodeHealth( + request.getNodeId(), + request.getDiskHealthInfo(), + request.getDslHealthInfo(), + request.getRepositoriesHealthInfo() + ); listener.onResponse(AcknowledgedResponse.of(true)); } } diff --git a/server/src/main/java/org/elasticsearch/health/node/tracker/DiskHealthTracker.java b/server/src/main/java/org/elasticsearch/health/node/tracker/DiskHealthTracker.java new file mode 100644 index 0000000000000..a478130d83a78 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/health/node/tracker/DiskHealthTracker.java @@ -0,0 +1,136 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.health.node.tracker; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.DiskUsage; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.health.HealthStatus; +import org.elasticsearch.health.metadata.HealthMetadata; +import org.elasticsearch.health.node.DiskHealthInfo; +import org.elasticsearch.health.node.UpdateHealthInfoCacheAction; +import org.elasticsearch.node.NodeService; + +import java.util.Set; + +/** + * Determines the disk health of this node by checking if it exceeds the thresholds defined in the health metadata. + */ +public class DiskHealthTracker extends HealthTracker { + private static final Logger logger = LogManager.getLogger(DiskHealthTracker.class); + + private final NodeService nodeService; + private final ClusterService clusterService; + + public DiskHealthTracker(NodeService nodeService, ClusterService clusterService) { + this.nodeService = nodeService; + this.clusterService = clusterService; + } + + /** + * Determines the disk health of this node by checking if it exceeds the thresholds defined in the health metadata. + * + * @return the current disk health info. 
+ */ + @Override + public DiskHealthInfo checkCurrentHealth() { + var clusterState = clusterService.state(); + var healthMetadata = HealthMetadata.getFromClusterState(clusterState); + DiscoveryNode node = clusterState.getNodes().getLocalNode(); + HealthMetadata.Disk diskMetadata = healthMetadata.getDiskMetadata(); + DiskUsage usage = getDiskUsage(); + if (usage == null) { + return new DiskHealthInfo(HealthStatus.UNKNOWN, DiskHealthInfo.Cause.NODE_HAS_NO_DISK_STATS); + } + + ByteSizeValue totalBytes = ByteSizeValue.ofBytes(usage.totalBytes()); + + if (node.isDedicatedFrozenNode() || isDedicatedSearchNode(node)) { + long frozenFloodStageThreshold = diskMetadata.getFreeBytesFrozenFloodStageWatermark(totalBytes).getBytes(); + if (usage.freeBytes() < frozenFloodStageThreshold) { + logger.debug("Flood stage disk watermark [{}] exceeded on {}", frozenFloodStageThreshold, usage); + return new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.FROZEN_NODE_OVER_FLOOD_STAGE_THRESHOLD); + } + return new DiskHealthInfo(HealthStatus.GREEN); + } + long floodStageThreshold = diskMetadata.getFreeBytesFloodStageWatermark(totalBytes).getBytes(); + if (usage.freeBytes() < floodStageThreshold) { + logger.debug("Flood stage disk watermark [{}] exceeded on {}", floodStageThreshold, usage); + return new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.NODE_OVER_THE_FLOOD_STAGE_THRESHOLD); + } + + long highThreshold = diskMetadata.getFreeBytesHighWatermark(totalBytes).getBytes(); + if (usage.freeBytes() < highThreshold) { + if (node.canContainData()) { + // for data nodes only report YELLOW if shards can't move away from the node + if (DiskHealthTracker.hasRelocatingShards(clusterState, node) == false) { + logger.debug("High disk watermark [{}] exceeded on {}", highThreshold, usage); + return new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD); + } + } else { + // for non-data nodes report YELLOW when the disk high watermark is breached + logger.debug("High disk watermark [{}] exceeded on {}", highThreshold, usage); + return new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD); + } + } + return new DiskHealthInfo(HealthStatus.GREEN); + } + + @Override + public void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, DiskHealthInfo healthInfo) { + builder.diskHealthInfo(healthInfo); + } + + private static boolean isDedicatedSearchNode(DiscoveryNode node) { + Set roles = node.getRoles(); + return roles.contains(DiscoveryNodeRole.SEARCH_ROLE) + && roles.stream().filter(DiscoveryNodeRole::canContainData).anyMatch(r -> r != DiscoveryNodeRole.SEARCH_ROLE) == false; + } + + private DiskUsage getDiskUsage() { + NodeStats nodeStats = nodeService.stats( + CommonStatsFlags.NONE, + false, + false, + false, + false, + false, + true, + false, + false, + false, + false, + false, + false, + false, + false, + false, + false + ); + return DiskUsage.findLeastAvailablePath(nodeStats); + } + + static boolean hasRelocatingShards(ClusterState clusterState, DiscoveryNode node) { + RoutingNode routingNode = clusterState.getRoutingNodes().node(node.getId()); + if (routingNode == null) { + // routing node will be null for non-data nodes + return false; + } + return routingNode.numberOfShardsWithState(ShardRoutingState.RELOCATING) > 0; + } +} diff --git a/server/src/main/java/org/elasticsearch/health/node/tracker/HealthTracker.java b/server/src/main/java/org/elasticsearch/health/node/tracker/HealthTracker.java new file mode 100644 index 
0000000000000..2dd71a38f959e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/health/node/tracker/HealthTracker.java @@ -0,0 +1,105 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.health.node.tracker; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.health.node.LocalHealthMonitor; +import org.elasticsearch.health.node.UpdateHealthInfoCacheAction; + +import java.util.Objects; +import java.util.concurrent.atomic.AtomicReference; + +/** + * Base class for health trackers that will be executed by the {@link LocalHealthMonitor}. It keeps track of the last + * reported value and can retrieve the current health status when requested. + * + * @param the type of the health check result they track + */ +public abstract class HealthTracker { + private static final Logger logger = LogManager.getLogger(HealthTracker.class); + + private final AtomicReference lastReportedValue = new AtomicReference<>(); + + /** + * Determine the health info for this health check. + * + * @return the health info. + */ + public abstract T checkCurrentHealth(); + + /** + * Add the health info to the request builder. + * + * @param builder the builder to add the health info to. + * @param healthInfo the health info to add. + */ + public abstract void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, T healthInfo); + + /** + * Create a new {@link HealthProgress} instance by getting the current last reported value and determining the health info at this time. + * + * @return the new {@link HealthProgress} instance. + */ + public HealthProgress trackHealth() { + return new HealthProgress<>(this, lastReportedValue.get(), checkCurrentHealth()); + } + + /** + * Update the last reported health info to current, but only when the value inside lastReportedValue + * is equal to previous. + * + * @param previous the previous value that should be in lastReportedValue at the time of execution. + * @param current the value that should be stored in lastReportedValue. + */ + public void updateLastReportedHealth(T previous, T current) { + if (lastReportedValue.compareAndSet(previous, current)) { + logger.debug("Health info [{}] successfully sent, last reported value: {}.", current, previous); + } + } + + /** + * Reset the value of lastReportedValue to null. + * Should be used when, for example, the master or health node has changed. + */ + public void reset() { + lastReportedValue.set(null); + } + + public T getLastReportedValue() { + return lastReportedValue.get(); + } + + /** + * A record for storing the previous and current value of a health check. This allows us to be sure no concurrent processes have + * updated the health check's reference value. + * + * @param the type that the health tracker returns + */ + public record HealthProgress(HealthTracker healthTracker, T previousHealth, T currentHealth) { + public boolean hasChanged() { + return Objects.equals(previousHealth, currentHealth) == false; + } + + /** + * See {@link HealthTracker#addToRequestBuilder}. 
+ */ + public void updateRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder) { + healthTracker.addToRequestBuilder(builder, currentHealth); + } + + /** + * Update the reference value of the health tracker with the current health info. + * See {@link HealthTracker#updateLastReportedHealth} for more info. + */ + public void recordProgressIfRelevant() { + healthTracker.updateLastReportedHealth(previousHealth, currentHealth); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTracker.java b/server/src/main/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTracker.java new file mode 100644 index 0000000000000..cffc470045e0b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTracker.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.health.node.tracker; + +import org.elasticsearch.health.node.RepositoriesHealthInfo; +import org.elasticsearch.health.node.UpdateHealthInfoCacheAction; +import org.elasticsearch.repositories.InvalidRepository; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.repositories.UnknownTypeRepository; + +import java.util.ArrayList; +import java.util.List; + +/** + * Determines the health of repositories on this node. + */ +public class RepositoriesHealthTracker extends HealthTracker { + private final RepositoriesService repositoriesService; + + public RepositoriesHealthTracker(RepositoriesService repositoriesService) { + this.repositoriesService = repositoriesService; + } + + /** + * Determine the health of the repositories on this node. Do so by checking the current collection of registered repositories. + * + * @return the current repositories health on this node. 
+ */ + @Override + public RepositoriesHealthInfo checkCurrentHealth() { + var repositories = repositoriesService.getRepositories(); + if (repositories.isEmpty()) { + return new RepositoriesHealthInfo(List.of(), List.of()); + } + + var unknown = new ArrayList(); + var invalid = new ArrayList(); + repositories.values().forEach(repository -> { + if (repository instanceof UnknownTypeRepository) { + unknown.add(repository.getMetadata().name()); + } else if (repository instanceof InvalidRepository) { + invalid.add(repository.getMetadata().name()); + } + }); + return new RepositoriesHealthInfo(unknown, invalid); + } + + @Override + public void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, RepositoriesHealthInfo healthInfo) { + builder.repositoriesHealthInfo(healthInfo); + } +} diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 24c8b87bcff50..02c59a6f015ed 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -98,6 +98,9 @@ import org.elasticsearch.health.node.LocalHealthMonitor; import org.elasticsearch.health.node.ShardsCapacityHealthIndicatorService; import org.elasticsearch.health.node.selection.HealthNodeTaskExecutor; +import org.elasticsearch.health.node.tracker.DiskHealthTracker; +import org.elasticsearch.health.node.tracker.HealthTracker; +import org.elasticsearch.health.node.tracker.RepositoriesHealthTracker; import org.elasticsearch.health.stats.HealthApiStats; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.IndexSettingProvider; @@ -1018,7 +1021,8 @@ record PluginServiceInstances( transportService, featureService, threadPool, - telemetryProvider + telemetryProvider, + repositoryService ) ); @@ -1171,7 +1175,8 @@ private Module loadDiagnosticServices( TransportService transportService, FeatureService featureService, ThreadPool threadPool, - TelemetryProvider telemetryProvider + TelemetryProvider telemetryProvider, + RepositoriesService repositoriesService ) { MasterHistoryService masterHistoryService = new MasterHistoryService(transportService, threadPool, clusterService); @@ -1203,13 +1208,18 @@ private Module loadDiagnosticServices( telemetryProvider ); HealthMetadataService healthMetadataService = HealthMetadataService.create(clusterService, featureService, settings); + + List> healthTrackers = List.of( + new DiskHealthTracker(nodeService, clusterService), + new RepositoriesHealthTracker(repositoriesService) + ); LocalHealthMonitor localHealthMonitor = LocalHealthMonitor.create( settings, clusterService, - nodeService, threadPool, client, - featureService + featureService, + healthTrackers ); HealthInfoCache nodeHealthOverview = HealthInfoCache.create(clusterService); diff --git a/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java index 96b06e8b49f2d..0b460b5cb2fb7 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java @@ -8,29 +8,39 @@ package org.elasticsearch.snapshots; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.RepositoriesMetadata; import 
org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.health.Diagnosis; -import org.elasticsearch.health.Diagnosis.Resource.Type; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; import org.elasticsearch.health.HealthIndicatorService; +import org.elasticsearch.health.HealthStatus; import org.elasticsearch.health.ImpactArea; import org.elasticsearch.health.SimpleHealthIndicatorDetails; import org.elasticsearch.health.node.HealthInfo; +import org.elasticsearch.health.node.RepositoriesHealthInfo; import org.elasticsearch.repositories.RepositoryData; -import java.util.Collections; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; -import static org.elasticsearch.common.Strings.collectionToDelimitedStringWithLimit; +import static org.elasticsearch.cluster.node.DiscoveryNode.DISCOVERY_NODE_COMPARATOR; import static org.elasticsearch.common.util.CollectionUtils.limitSize; +import static org.elasticsearch.health.Diagnosis.Resource.Type.SNAPSHOT_REPOSITORY; import static org.elasticsearch.health.HealthStatus.GREEN; -import static org.elasticsearch.health.HealthStatus.RED; +import static org.elasticsearch.health.HealthStatus.UNKNOWN; +import static org.elasticsearch.health.HealthStatus.YELLOW; /** * This indicator reports health for snapshot repositories. @@ -44,11 +54,22 @@ public class RepositoryIntegrityHealthIndicatorService implements HealthIndicato public static final String NAME = "repository_integrity"; - public static final String HELP_URL = "https://ela.st/fix-repository-integrity"; + private static final String HELP_URL = "https://ela.st/fix-repository-integrity"; - public static final String REPOSITORY_CORRUPTED_IMPACT_ID = "repository_corruption"; + public static final String NO_REPOS_CONFIGURED = "No snapshot repositories configured."; + public static final String ALL_REPOS_HEALTHY = "All repositories are healthy."; + public static final String NO_REPO_HEALTH_INFO = "No repository health info."; - public static final Diagnosis.Definition CORRUPTED_REPOSITORY = new Diagnosis.Definition( + public static final List IMPACTS = List.of( + new HealthIndicatorImpact( + NAME, + "backups_at_risk", + 2, + "Data in the affected snapshot repositories may be lost and cannot be restored.", + List.of(ImpactArea.BACKUP) + ) + ); + public static final Diagnosis.Definition CORRUPTED_DEFINITION = new Diagnosis.Definition( NAME, "corrupt_repo_integrity", "Multiple clusters are writing to the same repository.", @@ -56,9 +77,22 @@ public class RepositoryIntegrityHealthIndicatorService implements HealthIndicato + " to this cluster.", HELP_URL ); - - public static final String NO_REPOS_CONFIGURED = "No snapshot repositories configured."; - public static final String NO_CORRUPT_REPOS = "No corrupted snapshot repositories."; + public static final Diagnosis.Definition UNKNOWN_DEFINITION = new Diagnosis.Definition( + NAME, + "unknown_repository", + "The repository uses an unknown type.", + "Ensure that all required plugins are installed on the affected nodes.", + HELP_URL + ); + public static final Diagnosis.Definition INVALID_DEFINITION = new Diagnosis.Definition( + NAME, + "invalid_repository", + 
"An exception occurred while trying to initialize the repository.", + """ + Make sure all nodes in the cluster are in sync with each other.\ + Refer to the nodes’ logs for detailed information on why the repository initialization failed.""", + HELP_URL + ); private final ClusterService clusterService; @@ -73,78 +107,162 @@ public String name() { @Override public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResourcesCount, HealthInfo healthInfo) { + var clusterState = clusterService.state(); var snapshotMetadata = RepositoriesMetadata.get(clusterService.state()); - if (snapshotMetadata.repositories().isEmpty()) { - return createIndicator( - GREEN, - NO_REPOS_CONFIGURED, - HealthIndicatorDetails.EMPTY, - Collections.emptyList(), - Collections.emptyList() - ); + var repositories = snapshotMetadata.repositories(); + if (repositories.isEmpty()) { + return createIndicator(GREEN, NO_REPOS_CONFIGURED, HealthIndicatorDetails.EMPTY, List.of(), List.of()); } - var corrupted = snapshotMetadata.repositories() - .stream() - .filter(repository -> repository.generation() == RepositoryData.CORRUPTED_REPO_GEN) - .map(RepositoryMetadata::name) - .toList(); - - var totalRepositories = snapshotMetadata.repositories().size(); - var corruptedRepositories = corrupted.size(); - - if (corrupted.isEmpty()) { - return createIndicator( - GREEN, - "No corrupted snapshot repositories.", - verbose ? new SimpleHealthIndicatorDetails(Map.of("total_repositories", totalRepositories)) : HealthIndicatorDetails.EMPTY, - Collections.emptyList(), - Collections.emptyList() - ); - } - List impacts = Collections.singletonList( - new HealthIndicatorImpact( - NAME, - REPOSITORY_CORRUPTED_IMPACT_ID, - 1, - String.format( - Locale.ROOT, - "Data in corrupted snapshot repositor%s %s may be lost and cannot be restored.", - corrupted.size() > 1 ? "ies" : "y", - limitSize(corrupted, 10) - ), - List.of(ImpactArea.BACKUP) - ) - ); + var repositoryHealthAnalyzer = new RepositoryHealthAnalyzer(clusterState, repositories, healthInfo.repositoriesInfoByNode()); return createIndicator( - RED, - createCorruptedRepositorySummary(corrupted), - verbose - ? new SimpleHealthIndicatorDetails( - Map.of( - "total_repositories", - totalRepositories, - "corrupted_repositories", - corruptedRepositories, - "corrupted", - limitSize(corrupted, 10) - ) - ) - : HealthIndicatorDetails.EMPTY, - impacts, - List.of( - new Diagnosis( - CORRUPTED_REPOSITORY, - List.of(new Diagnosis.Resource(Type.SNAPSHOT_REPOSITORY, limitSize(corrupted, maxAffectedResourcesCount))) - ) - ) + repositoryHealthAnalyzer.getHealthStatus(), + repositoryHealthAnalyzer.getSymptom(), + repositoryHealthAnalyzer.getDetails(verbose), + repositoryHealthAnalyzer.getImpacts(), + repositoryHealthAnalyzer.getDiagnoses(maxAffectedResourcesCount) ); } - private static String createCorruptedRepositorySummary(List corrupted) { - var message = new StringBuilder().append("Detected [").append(corrupted.size()).append("] corrupted snapshot repositories: "); - collectionToDelimitedStringWithLimit(corrupted, ",", "[", "].", 1024, message); - return message.toString(); + /** + * Analyzer for the cluster's repositories health; aids in constructing a {@link HealthIndicatorResult}. 
+     */
+    static class RepositoryHealthAnalyzer {
+        private final ClusterState clusterState;
+        private final int totalRepositories;
+        private final List<String> corruptedRepositories;
+        private final Set<String> unknownRepositories = new HashSet<>();
+        private final Set<String> nodesWithUnknownRepos = new HashSet<>();
+        private final Set<String> invalidRepositories = new HashSet<>();
+        private final Set<String> nodesWithInvalidRepos = new HashSet<>();
+        private final HealthStatus healthStatus;
+
+        private RepositoryHealthAnalyzer(
+            ClusterState clusterState,
+            List<RepositoryMetadata> repositories,
+            Map<String, RepositoriesHealthInfo> repositoriesHealthByNode
+        ) {
+            this.clusterState = clusterState;
+            this.totalRepositories = repositories.size();
+            this.corruptedRepositories = repositories.stream()
+                .filter(repository -> repository.generation() == RepositoryData.CORRUPTED_REPO_GEN)
+                .map(RepositoryMetadata::name)
+                .sorted()
+                .toList();
+
+            repositoriesHealthByNode.forEach((nodeId, healthInfo) -> {
+                unknownRepositories.addAll(healthInfo.unknownRepositories());
+                if (healthInfo.unknownRepositories().isEmpty() == false) {
+                    nodesWithUnknownRepos.add(nodeId);
+                }
+                invalidRepositories.addAll(healthInfo.invalidRepositories());
+                if (healthInfo.invalidRepositories().isEmpty() == false) {
+                    nodesWithInvalidRepos.add(nodeId);
+                }
+            });
+
+            if (corruptedRepositories.isEmpty() == false
+                || unknownRepositories.isEmpty() == false
+                || invalidRepositories.isEmpty() == false) {
+                healthStatus = YELLOW;
+            } else if (repositoriesHealthByNode.isEmpty()) {
+                healthStatus = UNKNOWN;
+            } else {
+                healthStatus = GREEN;
+            }
+        }
+
+        public HealthStatus getHealthStatus() {
+            return healthStatus;
+        }
+
+        public String getSymptom() {
+            if (healthStatus == GREEN) {
+                return ALL_REPOS_HEALTHY;
+            } else if (healthStatus == UNKNOWN) {
+                return NO_REPO_HEALTH_INFO;
+            }
+
+            return "Detected "
+                + Stream.of(
+                    generateSymptomString("corrupted", corruptedRepositories.size()),
+                    generateSymptomString("unknown", unknownRepositories.size()),
+                    generateSymptomString("invalid", invalidRepositories.size())
+                ).filter(Objects::nonNull).collect(Collectors.joining(", and "))
+                + ".";
+        }
+
+        private static String generateSymptomString(String type, long size) {
+            if (size == 0) {
+                return null;
+            }
+
+            return String.format(Locale.ROOT, "[%d] %s snapshot repositor%s", size, type, size > 1 ? "ies" : "y");
"ies" : "y"); + } + + public HealthIndicatorDetails getDetails(boolean verbose) { + if (verbose == false) { + return HealthIndicatorDetails.EMPTY; + } + Map map = new HashMap<>(); + map.put("total_repositories", totalRepositories); + + if (healthStatus != GREEN) { + map.put("corrupted_repositories", corruptedRepositories.size()); + map.put("corrupted", limitSize(corruptedRepositories, 10)); + + if (healthStatus != UNKNOWN) { + map.put("unknown_repositories", unknownRepositories.size()); + map.put("invalid_repositories", invalidRepositories.size()); + } + } + + return new SimpleHealthIndicatorDetails(map); + } + + public List getImpacts() { + if (healthStatus == GREEN || healthStatus == UNKNOWN) { + return List.of(); + } + return IMPACTS; + } + + public List getDiagnoses(int maxAffectedResourcesCount) { + var diagnoses = new ArrayList(); + if (corruptedRepositories.isEmpty() == false) { + diagnoses.add( + new Diagnosis( + CORRUPTED_DEFINITION, + List.of(new Diagnosis.Resource(SNAPSHOT_REPOSITORY, limitSize(corruptedRepositories, maxAffectedResourcesCount))) + ) + ); + } + if (unknownRepositories.size() > 0) { + diagnoses.add(createDiagnosis(UNKNOWN_DEFINITION, unknownRepositories, nodesWithUnknownRepos, maxAffectedResourcesCount)); + } + if (invalidRepositories.size() > 0) { + diagnoses.add(createDiagnosis(INVALID_DEFINITION, invalidRepositories, nodesWithInvalidRepos, maxAffectedResourcesCount)); + } + return diagnoses; + } + + private Diagnosis createDiagnosis( + Diagnosis.Definition definition, + Set repos, + Set nodes, + int maxAffectedResourcesCount + ) { + var reposView = repos.stream().sorted().limit(maxAffectedResourcesCount).toList(); + var nodesView = nodes.stream() + .map(nodeId -> clusterState.nodes().get(nodeId)) + .sorted(DISCOVERY_NODE_COMPARATOR) + .limit(maxAffectedResourcesCount) + .toList(); + return new Diagnosis( + definition, + List.of(new Diagnosis.Resource(SNAPSHOT_REPOSITORY, reposView), new Diagnosis.Resource(nodesView)) + ); + } } } diff --git a/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java b/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java index 8ca531b678c4a..f7e2bb34740a7 100644 --- a/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.health.node.DataStreamLifecycleHealthInfo; -import org.elasticsearch.health.node.DiskHealthInfo; import org.elasticsearch.health.node.FetchHealthInfoCacheAction; import org.elasticsearch.health.node.HealthInfo; import org.elasticsearch.test.ESTestCase; @@ -22,17 +21,18 @@ import org.junit.After; import org.junit.Before; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Stream; +import static org.elasticsearch.core.Tuple.tuple; import static org.elasticsearch.health.HealthStatus.GREEN; import static org.elasticsearch.health.HealthStatus.RED; import static org.elasticsearch.health.HealthStatus.UNKNOWN; import static org.elasticsearch.health.HealthStatus.YELLOW; +import static org.elasticsearch.health.node.HealthInfoTests.randomDiskHealthInfo; +import static org.elasticsearch.health.node.HealthInfoTests.randomRepoHealthInfo; import static org.hamcrest.Matchers.equalTo; import static 
org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.is; @@ -248,12 +248,9 @@ public void testThatIndicatorsGetHealthInfoData() throws Exception { var networkLatency = new HealthIndicatorResult("network_latency", GREEN, null, null, null, null); var slowTasks = new HealthIndicatorResult("slow_task_assignment", YELLOW, null, null, null, null); var shardsAvailable = new HealthIndicatorResult("shards_availability", GREEN, null, null, null, null); - Map diskHealthInfoMap = new HashMap<>(); - diskHealthInfoMap.put( - randomAlphaOfLength(30), - new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())) - ); - HealthInfo healthInfo = new HealthInfo(diskHealthInfoMap, DataStreamLifecycleHealthInfo.NO_DSL_ERRORS); + var diskHealthInfoMap = randomMap(1, 1, () -> tuple(randomAlphaOfLength(10), randomDiskHealthInfo())); + var repoHealthInfoMap = randomMap(1, 1, () -> tuple(randomAlphaOfLength(10), randomRepoHealthInfo())); + HealthInfo healthInfo = new HealthInfo(diskHealthInfoMap, DataStreamLifecycleHealthInfo.NO_DSL_ERRORS, repoHealthInfoMap); var service = new HealthService( // The preflight indicator does not get data because the data is not fetched until after the preflight check diff --git a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java index 1584c4a57dd32..a622c1ff600d6 100644 --- a/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/DiskHealthIndicatorServiceTests.java @@ -258,7 +258,7 @@ public void testRedNoBlockedIndicesAndRedAllRoleNodes() throws IOException { diskInfoByNode.put(discoveryNode.getId(), new DiskHealthInfo(HealthStatus.GREEN)); } } - HealthInfo healthInfo = new HealthInfo(diskInfoByNode, DataStreamLifecycleHealthInfo.NO_DSL_ERRORS); + HealthInfo healthInfo = new HealthInfo(diskInfoByNode, DataStreamLifecycleHealthInfo.NO_DSL_ERRORS, Map.of()); HealthIndicatorResult result = diskHealthIndicatorService.calculate(true, healthInfo); assertThat(result.status(), equalTo(HealthStatus.RED)); @@ -1021,7 +1021,7 @@ private HealthInfo createHealthInfo(List healthInfoConfigs) { diskInfoByNode.put(node.getId(), diskHealthInfo); } } - return new HealthInfo(diskInfoByNode, DataStreamLifecycleHealthInfo.NO_DSL_ERRORS); + return new HealthInfo(diskInfoByNode, DataStreamLifecycleHealthInfo.NO_DSL_ERRORS, Map.of()); } private static ClusterService createClusterService(Collection nodes, boolean withBlockedIndex) { diff --git a/server/src/test/java/org/elasticsearch/health/node/FetchHealthInfoCacheActionTests.java b/server/src/test/java/org/elasticsearch/health/node/FetchHealthInfoCacheActionTests.java index f921c03686da4..f497a9f02dda6 100644 --- a/server/src/test/java/org/elasticsearch/health/node/FetchHealthInfoCacheActionTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/FetchHealthInfoCacheActionTests.java @@ -29,13 +29,13 @@ import org.junit.BeforeClass; import java.util.Collections; -import java.util.HashMap; -import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.health.node.HealthInfoTests.mutateHealthInfo; import static org.elasticsearch.health.node.HealthInfoTests.randomDslHealthInfo; +import static org.elasticsearch.health.node.HealthInfoTests.randomRepoHealthInfo; import static 
org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; @@ -102,7 +102,7 @@ public void testAction() throws ExecutionException, InterruptedException { setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, localNode, allNodes)); HealthInfoCache healthInfoCache = getTestHealthInfoCache(); final FetchHealthInfoCacheAction.Response expectedResponse = new FetchHealthInfoCacheAction.Response( - new HealthInfo(healthInfoCache.getHealthInfo().diskInfoByNode(), healthInfoCache.getHealthInfo().dslHealthInfo()) + healthInfoCache.getHealthInfo() ); ActionTestUtils.execute( new FetchHealthInfoCacheAction.TransportAction( @@ -128,35 +128,24 @@ private HealthInfoCache getTestHealthInfoCache() { healthInfoCache.updateNodeHealth( nodeId, new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())), - randomDslHealthInfo() + randomDslHealthInfo(), + randomRepoHealthInfo() ); } return healthInfoCache; } public void testResponseSerialization() { - FetchHealthInfoCacheAction.Response response = new FetchHealthInfoCacheAction.Response( - new HealthInfo(getTestHealthInfoCache().getHealthInfo().diskInfoByNode(), DataStreamLifecycleHealthInfo.NO_DSL_ERRORS) - ); + var healthInfo = getTestHealthInfoCache().getHealthInfo(); + FetchHealthInfoCacheAction.Response response = new FetchHealthInfoCacheAction.Response(healthInfo); EqualsHashCodeTestUtils.checkEqualsAndHashCode( response, - resopnseWritable -> copyWriteable(resopnseWritable, writableRegistry(), FetchHealthInfoCacheAction.Response::new), + responseWritable -> copyWriteable(responseWritable, writableRegistry(), FetchHealthInfoCacheAction.Response::new), this::mutateResponse ); } private FetchHealthInfoCacheAction.Response mutateResponse(FetchHealthInfoCacheAction.Response originalResponse) { - Map diskHealthInfoMap = originalResponse.getHealthInfo().diskInfoByNode(); - Map diskHealthInfoMapCopy = new HashMap<>(diskHealthInfoMap); - diskHealthInfoMapCopy.put( - randomAlphaOfLength(10), - new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())) - ); - return new FetchHealthInfoCacheAction.Response( - new HealthInfo( - diskHealthInfoMapCopy, - randomValueOtherThan(originalResponse.getHealthInfo().dslHealthInfo(), HealthInfoTests::randomDslHealthInfo) - ) - ); + return new FetchHealthInfoCacheAction.Response(mutateHealthInfo(originalResponse.getHealthInfo())); } } diff --git a/server/src/test/java/org/elasticsearch/health/node/HealthInfoCacheTests.java b/server/src/test/java/org/elasticsearch/health/node/HealthInfoCacheTests.java index fec3504d17218..2d39d4e8a42b5 100644 --- a/server/src/test/java/org/elasticsearch/health/node/HealthInfoCacheTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/HealthInfoCacheTests.java @@ -22,6 +22,7 @@ import java.util.Set; import static org.elasticsearch.health.node.HealthInfoTests.randomDslHealthInfo; +import static org.elasticsearch.health.node.HealthInfoTests.randomRepoHealthInfo; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -46,11 +47,13 @@ public class HealthInfoCacheTests extends ESTestCase { public void testAddHealthInfo() { HealthInfoCache healthInfoCache = HealthInfoCache.create(clusterService); DataStreamLifecycleHealthInfo latestDslHealthInfo = randomDslHealthInfo(); - 
healthInfoCache.updateNodeHealth(node1.getId(), GREEN, latestDslHealthInfo);
-        healthInfoCache.updateNodeHealth(node2.getId(), RED, null);
+        var repoHealthInfo = randomRepoHealthInfo();
+        healthInfoCache.updateNodeHealth(node1.getId(), GREEN, latestDslHealthInfo, repoHealthInfo);
+        healthInfoCache.updateNodeHealth(node2.getId(), RED, null, null);
 
         Map<String, DiskHealthInfo> diskHealthInfo = healthInfoCache.getHealthInfo().diskInfoByNode();
-        healthInfoCache.updateNodeHealth(node1.getId(), RED, null);
+        // Ensure that HealthInfoCache#getHealthInfo() returns a copy of the health info.
+        healthInfoCache.updateNodeHealth(node1.getId(), RED, null, null);
 
         assertThat(diskHealthInfo.get(node1.getId()), equalTo(GREEN));
         assertThat(diskHealthInfo.get(node2.getId()), equalTo(RED));
@@ -60,9 +63,10 @@ public void testRemoveNodeFromTheCluster() {
         HealthInfoCache healthInfoCache = HealthInfoCache.create(clusterService);
-        healthInfoCache.updateNodeHealth(node1.getId(), GREEN, null);
+        healthInfoCache.updateNodeHealth(node1.getId(), GREEN, null, null);
         DataStreamLifecycleHealthInfo latestDslHealthInfo = randomDslHealthInfo();
-        healthInfoCache.updateNodeHealth(node2.getId(), RED, latestDslHealthInfo);
+        var repoHealthInfo = randomRepoHealthInfo();
+        healthInfoCache.updateNodeHealth(node2.getId(), RED, latestDslHealthInfo, repoHealthInfo);
 
         ClusterState previous = ClusterStateCreationUtils.state(node1, node1, node1, allNodes);
         ClusterState current = ClusterStateCreationUtils.state(node1, node1, node1, new DiscoveryNode[] { node1 });
@@ -78,8 +82,8 @@ public void testNotAHealthNode() {
         HealthInfoCache healthInfoCache = HealthInfoCache.create(clusterService);
-        healthInfoCache.updateNodeHealth(node1.getId(), GREEN, randomDslHealthInfo());
-        healthInfoCache.updateNodeHealth(node2.getId(), RED, null);
+        healthInfoCache.updateNodeHealth(node1.getId(), GREEN, randomDslHealthInfo(), randomRepoHealthInfo());
+        healthInfoCache.updateNodeHealth(node2.getId(), RED, null, null);
 
         ClusterState previous = ClusterStateCreationUtils.state(node1, node1, node1, allNodes);
         ClusterState current = ClusterStateCreationUtils.state(node1, node1, node2, allNodes);
@@ -88,5 +92,7 @@
         Map<String, DiskHealthInfo> diskHealthInfo = healthInfoCache.getHealthInfo().diskInfoByNode();
         assertThat(diskHealthInfo.isEmpty(), equalTo(true));
         assertThat(healthInfoCache.getHealthInfo().dslHealthInfo(), is(nullValue()));
+        Map<String, RepositoriesHealthInfo> repoHealthInfo = healthInfoCache.getHealthInfo().repositoriesInfoByNode();
+        assertThat(repoHealthInfo.isEmpty(), equalTo(true));
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/health/node/HealthInfoTests.java b/server/src/test/java/org/elasticsearch/health/node/HealthInfoTests.java
index c8ccda1c5b88d..b91fce1623b5e 100644
--- a/server/src/test/java/org/elasticsearch/health/node/HealthInfoTests.java
+++ b/server/src/test/java/org/elasticsearch/health/node/HealthInfoTests.java
@@ -14,6 +14,9 @@
 
 import java.util.HashMap;
 import java.util.Map;
+import java.util.function.Supplier;
+
+import static org.elasticsearch.core.Tuple.tuple;
 
 public class HealthInfoTests extends AbstractWireSerializingTestCase<HealthInfo> {
     @Override
@@ -23,60 +26,73 @@ protected Writeable.Reader<HealthInfo> instanceReader() {
 
     @Override
     protected HealthInfo createTestInstance() {
-        int numberOfNodes = randomIntBetween(0, 200);
-        Map<String, DiskHealthInfo> diskInfoByNode = new HashMap<>(numberOfNodes);
-        for (int i = 0; i < numberOfNodes; i++) {
-            DiskHealthInfo diskHealthInfo = randomBoolean()
-                ? new DiskHealthInfo(randomFrom(HealthStatus.values()))
-                : new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values()));
-            diskInfoByNode.put(randomAlphaOfLengthBetween(10, 100), diskHealthInfo);
-        }
-        return new HealthInfo(diskInfoByNode, randomBoolean() ? randomDslHealthInfo() : null);
+        var diskInfoByNode = randomMap(0, 10, () -> tuple(randomAlphaOfLength(10), randomDiskHealthInfo()));
+        var repositoriesInfoByNode = randomMap(0, 10, () -> tuple(randomAlphaOfLength(10), randomRepoHealthInfo()));
+        return new HealthInfo(diskInfoByNode, randomBoolean() ? randomDslHealthInfo() : null, repositoriesInfoByNode);
     }
 
     @Override
     public HealthInfo mutateInstance(HealthInfo originalHealthInfo) {
-        Map<String, DiskHealthInfo> diskHealthInfoMap = originalHealthInfo.diskInfoByNode();
-        Map<String, DiskHealthInfo> diskHealthInfoMapCopy = new HashMap<>(diskHealthInfoMap);
-        if (diskHealthInfoMap.isEmpty()) {
-            diskHealthInfoMapCopy.put(
-                randomAlphaOfLength(10),
-                new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values()))
+        return mutateHealthInfo(originalHealthInfo);
+    }
+
+    public static HealthInfo mutateHealthInfo(HealthInfo originalHealthInfo) {
+        var diskHealth = originalHealthInfo.diskInfoByNode();
+        var dslHealth = originalHealthInfo.dslHealthInfo();
+        var repoHealth = originalHealthInfo.repositoriesInfoByNode();
+        switch (randomInt(2)) {
+            case 0 -> diskHealth = mutateMap(
+                originalHealthInfo.diskInfoByNode(),
+                () -> randomAlphaOfLength(10),
+                HealthInfoTests::randomDiskHealthInfo
             );
+            case 1 -> dslHealth = randomValueOtherThan(originalHealthInfo.dslHealthInfo(), HealthInfoTests::randomDslHealthInfo);
+            case 2 -> repoHealth = mutateMap(
+                originalHealthInfo.repositoriesInfoByNode(),
+                () -> randomAlphaOfLength(10),
+                HealthInfoTests::randomRepoHealthInfo
+            );
+        }
+        return new HealthInfo(diskHealth, dslHealth, repoHealth);
+    }
+
+    public static DiskHealthInfo randomDiskHealthInfo() {
+        return randomBoolean()
+            ? new DiskHealthInfo(randomFrom(HealthStatus.values()))
+            : new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values()));
+    }
+
+    public static DataStreamLifecycleHealthInfo randomDslHealthInfo() {
+        return new DataStreamLifecycleHealthInfo(
+            randomList(5, () -> new DslErrorInfo(randomAlphaOfLength(100), System.currentTimeMillis(), randomIntBetween(15, 500))),
+            randomIntBetween(6, 1000)
+        );
+    }
+
+    public static RepositoriesHealthInfo randomRepoHealthInfo() {
+        return new RepositoriesHealthInfo(randomList(5, () -> randomAlphaOfLength(10)), randomList(5, () -> randomAlphaOfLength(10)));
+    }
+
+    /**
+     * Mutates a {@link Map} by either adding, updating, or removing an entry.
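+     * For instance, a map {"a": 1} may become {"a": 1, "b": 7} (addition), {"a": 2} (update), or {} (removal),
+     * each chosen with equal probability; an empty map always gets an entry added.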
+     */
+    public static <K, V> Map<K, V> mutateMap(Map<K, V> original, Supplier<K> randomKeySupplier, Supplier<V> randomValueSupplier) {
+        Map<K, V> mapCopy = new HashMap<>(original);
+        if (original.isEmpty()) {
+            mapCopy.put(randomKeySupplier.get(), randomValueSupplier.get());
         } else {
             switch (randomIntBetween(1, 3)) {
-                case 1 -> {
-                    diskHealthInfoMapCopy.put(
-                        randomAlphaOfLength(10),
-                        new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values()))
-                    );
-                }
+                case 1 -> mapCopy.put(randomKeySupplier.get(), randomValueSupplier.get());
                 case 2 -> {
-                    String someNode = randomFrom(diskHealthInfoMap.keySet());
-                    diskHealthInfoMapCopy.put(
-                        someNode,
-                        new DiskHealthInfo(
-                            randomValueOtherThan(diskHealthInfoMap.get(someNode).healthStatus(), () -> randomFrom(HealthStatus.values())),
-                            randomFrom(DiskHealthInfo.Cause.values())
-                        )
-                    );
+                    K someKey = randomFrom(original.keySet());
+                    mapCopy.put(someKey, randomValueOtherThan(original.get(someKey), randomValueSupplier));
                 }
                 case 3 -> {
-                    diskHealthInfoMapCopy.remove(randomFrom(diskHealthInfoMapCopy.keySet()));
+                    mapCopy.remove(randomFrom(mapCopy.keySet()));
                 }
                 default -> throw new IllegalStateException();
             }
         }
-        return new HealthInfo(
-            diskHealthInfoMapCopy,
-            randomValueOtherThan(originalHealthInfo.dslHealthInfo(), HealthInfoTests::randomDslHealthInfo)
-        );
-    }
-
-    static DataStreamLifecycleHealthInfo randomDslHealthInfo() {
-        return new DataStreamLifecycleHealthInfo(
-            randomList(5, () -> new DslErrorInfo(randomAlphaOfLength(100), System.currentTimeMillis(), randomIntBetween(15, 500))),
-            randomIntBetween(6, 1000)
-        );
+        return mapCopy;
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java b/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java
index 301642a7d7dcb..bb24d3118d77b 100644
--- a/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java
+++ b/server/src/test/java/org/elasticsearch/health/node/LocalHealthMonitorTests.java
@@ -9,8 +9,6 @@
 package org.elasticsearch.health.node;
 
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
-import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.support.replication.ClusterStateCreationUtils;
 import org.elasticsearch.client.internal.Client;
@@ -19,8 +17,6 @@
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodeRole;
 import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
-import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -32,8 +28,7 @@
 import org.elasticsearch.health.HealthStatus;
 import org.elasticsearch.health.metadata.HealthMetadata;
 import org.elasticsearch.health.node.selection.HealthNodeExecutorTests;
-import org.elasticsearch.monitor.fs.FsInfo;
-import org.elasticsearch.node.NodeService;
+import org.elasticsearch.health.node.tracker.HealthTracker;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -46,12 +41,9 @@
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicReference;
 
-import static
org.elasticsearch.action.support.replication.ClusterStateCreationUtils.state; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -60,16 +52,14 @@ public class LocalHealthMonitorTests extends ESTestCase { private static final DiskHealthInfo GREEN = new DiskHealthInfo(HealthStatus.GREEN, null); private static ThreadPool threadPool; - private NodeService nodeService; private ClusterService clusterService; private DiscoveryNode node; private DiscoveryNode frozenNode; - private DiscoveryNode searchNode; - private DiscoveryNode searchAndIndexNode; private HealthMetadata healthMetadata; private ClusterState clusterState; private Client client; - private FeatureService featureService; + private MockHealthTracker mockHealthTracker; + private LocalHealthMonitor localHealthMonitor; @BeforeClass public static void setUpThreadPool() { @@ -82,6 +72,7 @@ public static void tearDownThreadPool() { } @Before + @SuppressWarnings("unchecked") public void setUp() throws Exception { super.setUp(); // Set-up cluster state @@ -99,8 +90,8 @@ public void setUp() throws Exception { .name("frozen-node") .roles(Set.of(DiscoveryNodeRole.DATA_FROZEN_NODE_ROLE)) .build(); - searchNode = DiscoveryNodeUtils.builder("search-node").name("search-node").roles(Set.of(DiscoveryNodeRole.SEARCH_ROLE)).build(); - searchAndIndexNode = DiscoveryNodeUtils.builder("search-and-index-node") + var searchNode = DiscoveryNodeUtils.builder("search-node").name("search-node").roles(Set.of(DiscoveryNodeRole.SEARCH_ROLE)).build(); + var searchAndIndexNode = DiscoveryNodeUtils.builder("search-and-index-node") .name("search-and-index-node") .roles(Set.of(DiscoveryNodeRole.SEARCH_ROLE, DiscoveryNodeRole.INDEX_ROLE)) .build(); @@ -120,11 +111,21 @@ public void setUp() throws Exception { when(clusterService.localNode()).thenReturn(node); // Set-up node service with a node with a healthy disk space usage - nodeService = mock(NodeService.class); client = mock(Client.class); - featureService = new FeatureService(List.of(new HealthFeatures())); + FeatureService featureService = new FeatureService(List.of(new HealthFeatures())); + + mockHealthTracker = new MockHealthTracker(); + + localHealthMonitor = LocalHealthMonitor.create( + Settings.EMPTY, + clusterService, + threadPool, + client, + featureService, + List.of(mockHealthTracker) + ); } @SuppressWarnings("unchecked") @@ -136,20 +137,12 @@ public void testUpdateHealthInfo() throws Exception { listener.onResponse(null); return null; }).when(client).execute(any(), any(), any()); - simulateHealthDiskSpace(); - LocalHealthMonitor localHealthMonitor = LocalHealthMonitor.create( - Settings.EMPTY, - clusterService, - nodeService, - threadPool, - client, - featureService - ); + // We override the poll interval like this to avoid the min value set by the setting which is too high for this test localHealthMonitor.setMonitorInterval(TimeValue.timeValueMillis(10)); - assertThat(localHealthMonitor.getLastReportedDiskHealthInfo(), nullValue()); + assertThat(mockHealthTracker.getLastReportedValue(), nullValue()); localHealthMonitor.clusterChanged(new ClusterChangedEvent("initialize", clusterState, ClusterState.EMPTY_STATE)); - assertBusy(() -> assertThat(localHealthMonitor.getLastReportedDiskHealthInfo(), 
equalTo(GREEN))); + assertBusy(() -> assertThat(mockHealthTracker.getLastReportedValue(), equalTo(GREEN))); } @SuppressWarnings("unchecked") @@ -162,18 +155,9 @@ public void testDoNotUpdateHealthInfoOnFailure() throws Exception { return null; }).when(client).execute(any(), any(), any()); - simulateHealthDiskSpace(); - LocalHealthMonitor localHealthMonitor = LocalHealthMonitor.create( - Settings.EMPTY, - clusterService, - nodeService, - threadPool, - client, - featureService - ); localHealthMonitor.clusterChanged(new ClusterChangedEvent("initialize", clusterState, ClusterState.EMPTY_STATE)); assertBusy(() -> assertThat(clientCalled.get(), equalTo(true))); - assertThat(localHealthMonitor.getLastReportedDiskHealthInfo(), nullValue()); + assertThat(mockHealthTracker.getLastReportedValue(), nullValue()); } @SuppressWarnings("unchecked") @@ -182,7 +166,6 @@ public void testSendHealthInfoToNewNode() throws Exception { .copyAndUpdate(b -> b.putCustom(HealthMetadata.TYPE, healthMetadata)); ClusterState current = ClusterStateCreationUtils.state(node, node, node, new DiscoveryNode[] { node, frozenNode }) .copyAndUpdate(b -> b.putCustom(HealthMetadata.TYPE, healthMetadata)); - simulateHealthDiskSpace(); AtomicInteger counter = new AtomicInteger(0); doAnswer(invocation -> { @@ -195,16 +178,8 @@ public void testSendHealthInfoToNewNode() throws Exception { }).when(client).execute(any(), any(), any()); when(clusterService.state()).thenReturn(previous); - LocalHealthMonitor localHealthMonitor = LocalHealthMonitor.create( - Settings.EMPTY, - clusterService, - nodeService, - threadPool, - client, - featureService - ); localHealthMonitor.clusterChanged(new ClusterChangedEvent("start-up", previous, ClusterState.EMPTY_STATE)); - assertBusy(() -> assertThat(localHealthMonitor.getLastReportedDiskHealthInfo(), equalTo(GREEN))); + assertBusy(() -> assertThat(mockHealthTracker.getLastReportedValue(), equalTo(GREEN))); localHealthMonitor.clusterChanged(new ClusterChangedEvent("health-node-switch", current, previous)); assertBusy(() -> assertThat(counter.get(), equalTo(2))); } @@ -215,7 +190,6 @@ public void testResendHealthInfoOnMasterChange() throws Exception { .copyAndUpdate(b -> b.putCustom(HealthMetadata.TYPE, healthMetadata)); ClusterState current = ClusterStateCreationUtils.state(node, frozenNode, node, new DiscoveryNode[] { node, frozenNode }) .copyAndUpdate(b -> b.putCustom(HealthMetadata.TYPE, healthMetadata)); - simulateHealthDiskSpace(); AtomicInteger counter = new AtomicInteger(0); doAnswer(invocation -> { @@ -228,16 +202,8 @@ public void testResendHealthInfoOnMasterChange() throws Exception { }).when(client).execute(any(), any(), any()); when(clusterService.state()).thenReturn(previous); - LocalHealthMonitor localHealthMonitor = LocalHealthMonitor.create( - Settings.EMPTY, - clusterService, - nodeService, - threadPool, - client, - featureService - ); localHealthMonitor.clusterChanged(new ClusterChangedEvent("start-up", previous, ClusterState.EMPTY_STATE)); - assertBusy(() -> assertThat(localHealthMonitor.getLastReportedDiskHealthInfo(), equalTo(GREEN))); + assertBusy(() -> assertThat(mockHealthTracker.getLastReportedValue(), equalTo(GREEN))); localHealthMonitor.clusterChanged(new ClusterChangedEvent("health-node-switch", current, previous)); assertBusy(() -> assertThat(counter.get(), equalTo(2))); } @@ -251,276 +217,46 @@ public void testEnablingAndDisabling() throws Exception { listener.onResponse(null); return null; }).when(client).execute(any(), any(), any()); - simulateHealthDiskSpace(); 
when(clusterService.state()).thenReturn(null); - LocalHealthMonitor localHealthMonitor = LocalHealthMonitor.create( - Settings.EMPTY, - clusterService, - nodeService, - threadPool, - client, - featureService - ); // Ensure that there are no issues if the cluster state hasn't been initialized yet localHealthMonitor.setEnabled(true); - assertThat(localHealthMonitor.getLastReportedDiskHealthInfo(), nullValue()); + assertThat(mockHealthTracker.getLastReportedValue(), nullValue()); assertThat(clientCalledCount.get(), equalTo(0)); when(clusterService.state()).thenReturn(clusterState); localHealthMonitor.clusterChanged(new ClusterChangedEvent("test", clusterState, ClusterState.EMPTY_STATE)); - assertBusy(() -> assertThat(localHealthMonitor.getLastReportedDiskHealthInfo(), equalTo(GREEN))); + assertBusy(() -> assertThat(mockHealthTracker.getLastReportedValue(), equalTo(GREEN))); assertThat(clientCalledCount.get(), equalTo(1)); + DiskHealthInfo nextHealthStatus = new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.NODE_OVER_THE_FLOOD_STAGE_THRESHOLD); + // Disable the local monitoring localHealthMonitor.setEnabled(false); localHealthMonitor.setMonitorInterval(TimeValue.timeValueMillis(1)); - simulateDiskOutOfSpace(); + mockHealthTracker.setHealthInfo(nextHealthStatus); assertThat(clientCalledCount.get(), equalTo(1)); localHealthMonitor.setMonitorInterval(TimeValue.timeValueSeconds(30)); localHealthMonitor.setEnabled(true); - DiskHealthInfo nextHealthStatus = new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.NODE_OVER_THE_FLOOD_STAGE_THRESHOLD); - assertBusy(() -> assertThat(localHealthMonitor.getLastReportedDiskHealthInfo(), equalTo(nextHealthStatus))); - } - - public void testNoDiskData() { - when( - nodeService.stats( - eq(CommonStatsFlags.NONE), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(true), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false) - ) - ).thenReturn(nodeStats()); - LocalHealthMonitor.DiskCheck diskCheck = new LocalHealthMonitor.DiskCheck(nodeService); - DiskHealthInfo diskHealth = diskCheck.getHealth(healthMetadata, clusterState); - assertThat(diskHealth, equalTo(new DiskHealthInfo(HealthStatus.UNKNOWN, DiskHealthInfo.Cause.NODE_HAS_NO_DISK_STATS))); - } - - public void testGreenDiskStatus() { - simulateHealthDiskSpace(); - LocalHealthMonitor.DiskCheck diskMonitor = new LocalHealthMonitor.DiskCheck(nodeService); - DiskHealthInfo diskHealth = diskMonitor.getHealth(healthMetadata, clusterState); - assertThat(diskHealth, equalTo(GREEN)); - } - - public void testYellowDiskStatus() { - initializeIncreasedDiskSpaceUsage(); - LocalHealthMonitor.DiskCheck diskMonitor = new LocalHealthMonitor.DiskCheck(nodeService); - DiskHealthInfo diskHealth = diskMonitor.getHealth(healthMetadata, clusterState); - assertThat(diskHealth, equalTo(new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD))); - } - - public void testRedDiskStatus() { - simulateDiskOutOfSpace(); - LocalHealthMonitor.DiskCheck diskMonitor = new LocalHealthMonitor.DiskCheck(nodeService); - DiskHealthInfo diskHealth = diskMonitor.getHealth(healthMetadata, clusterState); - assertThat(diskHealth, equalTo(new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.NODE_OVER_THE_FLOOD_STAGE_THRESHOLD))); - } - - public void testFrozenGreenDiskStatus() { - simulateHealthDiskSpace(); - ClusterState clusterStateFrozenLocalNode = clusterState.copyAndUpdate( - b -> 
b.nodes(DiscoveryNodes.builder().add(node).add(frozenNode).localNodeId(frozenNode.getId()).build()) - ); - LocalHealthMonitor.DiskCheck diskMonitor = new LocalHealthMonitor.DiskCheck(nodeService); - DiskHealthInfo diskHealth = diskMonitor.getHealth(healthMetadata, clusterStateFrozenLocalNode); - assertThat(diskHealth, equalTo(GREEN)); - } - - public void testFrozenRedDiskStatus() { - simulateDiskOutOfSpace(); - ClusterState clusterStateFrozenLocalNode = clusterState.copyAndUpdate( - b -> b.nodes(DiscoveryNodes.builder().add(node).add(frozenNode).localNodeId(frozenNode.getId()).build()) - ); - LocalHealthMonitor.DiskCheck diskMonitor = new LocalHealthMonitor.DiskCheck(nodeService); - DiskHealthInfo diskHealth = diskMonitor.getHealth(healthMetadata, clusterStateFrozenLocalNode); - assertThat(diskHealth, equalTo(new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.FROZEN_NODE_OVER_FLOOD_STAGE_THRESHOLD))); - } - - public void testSearchNodeGreenDiskStatus() { - // Search-only nodes behave like frozen nodes -- they are RED at 95% full, GREEN otherwise. - initializeIncreasedDiskSpaceUsage(); - ClusterState clusterStateSearchLocalNode = clusterState.copyAndUpdate( - b -> b.nodes(DiscoveryNodes.builder().add(node).add(searchNode).localNodeId(searchNode.getId()).build()) - ); - LocalHealthMonitor.DiskCheck diskMonitor = new LocalHealthMonitor.DiskCheck(nodeService); - DiskHealthInfo diskHealth = diskMonitor.getHealth(healthMetadata, clusterStateSearchLocalNode); - assertThat(diskHealth, equalTo(GREEN)); - } - - public void testSearchNodeRedDiskStatus() { - // Search-only nodes behave like frozen nodes -- they are RED at 95% full, GREEN otherwise. - simulateDiskOutOfSpace(); - ClusterState clusterStateSearchLocalNode = clusterState.copyAndUpdate( - b -> b.nodes(DiscoveryNodes.builder().add(node).add(searchNode).localNodeId(searchNode.getId()).build()) - ); - LocalHealthMonitor.DiskCheck diskMonitor = new LocalHealthMonitor.DiskCheck(nodeService); - DiskHealthInfo diskHealth = diskMonitor.getHealth(healthMetadata, clusterStateSearchLocalNode); - assertThat(diskHealth, equalTo(new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.FROZEN_NODE_OVER_FLOOD_STAGE_THRESHOLD))); - } - - public void testSearchAndIndexNodesYellowDiskStatus() { - // A search role mixed with another data node role behaves like an ordinary data node -- YELLOW at 90% full. 
- initializeIncreasedDiskSpaceUsage(); - ClusterState clusterStateSearchLocalNode = clusterState.copyAndUpdate( - b -> b.nodes(DiscoveryNodes.builder().add(node).add(searchAndIndexNode).localNodeId(searchAndIndexNode.getId()).build()) - ); - LocalHealthMonitor.DiskCheck diskMonitor = new LocalHealthMonitor.DiskCheck(nodeService); - DiskHealthInfo diskHealth = diskMonitor.getHealth(healthMetadata, clusterStateSearchLocalNode); - assertThat(diskHealth, equalTo(new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD))); + assertBusy(() -> assertThat(mockHealthTracker.getLastReportedValue(), equalTo(nextHealthStatus))); } - public void testYellowStatusForNonDataNode() { - DiscoveryNode dedicatedMasterNode = DiscoveryNodeUtils.builder("master-node-1") - .name("master-node") - .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE)) - .build(); - clusterState = ClusterStateCreationUtils.state( - dedicatedMasterNode, - dedicatedMasterNode, - node, - new DiscoveryNode[] { node, dedicatedMasterNode } - ).copyAndUpdate(b -> b.putCustom(HealthMetadata.TYPE, healthMetadata)); - - initializeIncreasedDiskSpaceUsage(); - LocalHealthMonitor.DiskCheck diskMonitor = new LocalHealthMonitor.DiskCheck(nodeService); - DiskHealthInfo diskHealth = diskMonitor.getHealth(healthMetadata, clusterState); - assertThat(diskHealth, equalTo(new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD))); - } - - public void testHasRelocatingShards() { - String indexName = "my-index"; - final ClusterState state = state(indexName, true, ShardRoutingState.RELOCATING); - // local node coincides with the node hosting the (relocating) primary shard - DiscoveryNode localNode = state.nodes().getLocalNode(); - assertThat(LocalHealthMonitor.DiskCheck.hasRelocatingShards(state, localNode), is(true)); - - DiscoveryNode dedicatedMasterNode = DiscoveryNodeUtils.builder("master-node-1") - .name("master-node") - .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE)) - .build(); - ClusterState newState = ClusterState.builder(state) - .nodes(new DiscoveryNodes.Builder(state.nodes()).add(dedicatedMasterNode)) - .build(); - assertThat(LocalHealthMonitor.DiskCheck.hasRelocatingShards(newState, dedicatedMasterNode), is(false)); - } - - private void simulateDiskOutOfSpace() { - when( - nodeService.stats( - eq(CommonStatsFlags.NONE), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(true), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false) - ) - ).thenReturn(nodeStats(1000, 10)); - } - - private void initializeIncreasedDiskSpaceUsage() { - when( - nodeService.stats( - eq(CommonStatsFlags.NONE), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(true), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false) - ) - ).thenReturn(nodeStats(1000, 80)); - } + private static class MockHealthTracker extends HealthTracker { + private DiskHealthInfo healthInfo = GREEN; - private void simulateHealthDiskSpace() { - when( - nodeService.stats( - eq(CommonStatsFlags.NONE), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(true), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false), - eq(false) - ) - ).thenReturn(nodeStats(1000, 110)); - } + @Override + public DiskHealthInfo checkCurrentHealth() { + return healthInfo; + } - private NodeStats nodeStats(long 
total, long available) { - final FsInfo fs = new FsInfo(-1, null, new FsInfo.Path[] { new FsInfo.Path(null, null, total, 10, available) }); - return nodeStats(fs); - } + @Override + public void addToRequestBuilder(UpdateHealthInfoCacheAction.Request.Builder builder, DiskHealthInfo healthInfo) { + builder.diskHealthInfo(healthInfo); + } - private NodeStats nodeStats() { - return nodeStats(null); - } - - private NodeStats nodeStats(FsInfo fs) { - return new NodeStats( - node, // ignored - randomMillisUpToYear9999(), - null, - null, - null, - null, - null, - fs, - null, - null, - null, - null, - null, - null, - null, - null, - null, - null - ); + public void setHealthInfo(DiskHealthInfo healthInfo) { + this.healthInfo = healthInfo; + } } } diff --git a/server/src/test/java/org/elasticsearch/health/node/UpdateHealthInfoCacheActionTests.java b/server/src/test/java/org/elasticsearch/health/node/UpdateHealthInfoCacheActionTests.java index 500ecc85c2ac2..56d152e15d4b7 100644 --- a/server/src/test/java/org/elasticsearch/health/node/UpdateHealthInfoCacheActionTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/UpdateHealthInfoCacheActionTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.health.HealthStatus; +import org.elasticsearch.health.node.UpdateHealthInfoCacheAction.Request; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; import org.elasticsearch.test.transport.CapturingTransport; @@ -88,7 +89,7 @@ public static void afterClass() { public void testAction() throws ExecutionException, InterruptedException { DiskHealthInfo diskHealthInfo = new DiskHealthInfo(HealthStatus.GREEN, null); - UpdateHealthInfoCacheAction.Request request = new UpdateHealthInfoCacheAction.Request(localNode.getId(), diskHealthInfo); + Request request = new Request.Builder().nodeId(localNode.getId()).diskHealthInfo(diskHealthInfo).build(); PlainActionFuture listener = new PlainActionFuture<>(); setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, localNode, allNodes)); HealthInfoCache healthInfoCache = HealthInfoCache.create(clusterService); @@ -111,28 +112,31 @@ public void testAction() throws ExecutionException, InterruptedException { } public void testRequestSerialization() { - DiskHealthInfo diskHealthInfo = randomBoolean() - ? new DiskHealthInfo(randomFrom(HealthStatus.values())) - : new DiskHealthInfo(randomFrom(HealthStatus.values()), randomFrom(DiskHealthInfo.Cause.values())); - UpdateHealthInfoCacheAction.Request request = new UpdateHealthInfoCacheAction.Request(randomAlphaOfLength(10), diskHealthInfo); + // We start off with an "empty" request (i.e. only nodeId set), and let #mutateRequest change one of the fields at a time. 
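+        // Each mutation has to produce a request that is not equal to the original one, otherwise the
+        // equals/hashCode contract check below would fail.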
+ Request request = new Request.Builder().nodeId(randomAlphaOfLength(10)).build(); EqualsHashCodeTestUtils.checkEqualsAndHashCode( request, - serializedRequest -> copyWriteable(serializedRequest, writableRegistry(), UpdateHealthInfoCacheAction.Request::new), + serializedRequest -> copyWriteable(serializedRequest, writableRegistry(), Request::new), this::mutateRequest ); } - private UpdateHealthInfoCacheAction.Request mutateRequest(UpdateHealthInfoCacheAction.Request request) { + private Request mutateRequest(Request request) { String nodeId = request.getNodeId(); DiskHealthInfo diskHealthInfo = request.getDiskHealthInfo(); - switch (randomIntBetween(1, 2)) { - case 1 -> nodeId = randomAlphaOfLength(10); - case 2 -> diskHealthInfo = new DiskHealthInfo( - randomValueOtherThan(diskHealthInfo.healthStatus(), () -> randomFrom(HealthStatus.values())), - randomBoolean() ? null : randomFrom(DiskHealthInfo.Cause.values()) - ); + var dslHealthInfo = request.getDslHealthInfo(); + var repoHealthInfo = request.getRepositoriesHealthInfo(); + switch (randomInt(3)) { + case 0 -> nodeId = randomAlphaOfLength(10); + case 1 -> diskHealthInfo = randomValueOtherThan(diskHealthInfo, HealthInfoTests::randomDiskHealthInfo); + case 2 -> dslHealthInfo = randomValueOtherThan(dslHealthInfo, HealthInfoTests::randomDslHealthInfo); + case 3 -> repoHealthInfo = randomValueOtherThan(repoHealthInfo, HealthInfoTests::randomRepoHealthInfo); default -> throw new IllegalStateException(); } - return new UpdateHealthInfoCacheAction.Request(nodeId, diskHealthInfo); + return new Request.Builder().nodeId(nodeId) + .diskHealthInfo(diskHealthInfo) + .dslHealthInfo(dslHealthInfo) + .repositoriesHealthInfo(repoHealthInfo) + .build(); } } diff --git a/server/src/test/java/org/elasticsearch/health/node/tracker/DiskHealthTrackerTests.java b/server/src/test/java/org/elasticsearch/health/node/tracker/DiskHealthTrackerTests.java new file mode 100644 index 0000000000000..7089e5a19bc63 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/health/node/tracker/DiskHealthTrackerTests.java @@ -0,0 +1,331 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.health.node.tracker; + +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; +import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.RelativeByteSizeValue; +import org.elasticsearch.health.HealthStatus; +import org.elasticsearch.health.metadata.HealthMetadata; +import org.elasticsearch.health.node.DiskHealthInfo; +import org.elasticsearch.monitor.fs.FsInfo; +import org.elasticsearch.node.NodeService; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.Set; + +import static org.elasticsearch.action.support.replication.ClusterStateCreationUtils.state; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class DiskHealthTrackerTests extends ESTestCase { + + private static final DiskHealthInfo GREEN = new DiskHealthInfo(HealthStatus.GREEN, null); + private NodeService nodeService; + private ClusterService clusterService; + private DiscoveryNode node; + private DiscoveryNode frozenNode; + private DiscoveryNode searchNode; + private DiscoveryNode searchAndIndexNode; + private HealthMetadata healthMetadata; + private ClusterState clusterState; + private DiskHealthTracker diskHealthTracker; + + @Before + public void setUp() throws Exception { + super.setUp(); + // Set-up cluster state + healthMetadata = new HealthMetadata( + HealthMetadata.Disk.newBuilder() + .highWatermark(new RelativeByteSizeValue(ByteSizeValue.ofBytes(100))) + .floodStageWatermark(new RelativeByteSizeValue(ByteSizeValue.ofBytes(50))) + .frozenFloodStageWatermark(new RelativeByteSizeValue(ByteSizeValue.ofBytes(50))) + .frozenFloodStageMaxHeadroom(ByteSizeValue.ofBytes(10)) + .build(), + HealthMetadata.ShardLimits.newBuilder().maxShardsPerNode(999).maxShardsPerNodeFrozen(100).build() + ); + node = DiscoveryNodeUtils.create("node", "node"); + frozenNode = DiscoveryNodeUtils.builder("frozen-node") + .name("frozen-node") + .roles(Set.of(DiscoveryNodeRole.DATA_FROZEN_NODE_ROLE)) + .build(); + searchNode = DiscoveryNodeUtils.builder("search-node").name("search-node").roles(Set.of(DiscoveryNodeRole.SEARCH_ROLE)).build(); + searchAndIndexNode = DiscoveryNodeUtils.builder("search-and-index-node") + .name("search-and-index-node") + .roles(Set.of(DiscoveryNodeRole.SEARCH_ROLE, DiscoveryNodeRole.INDEX_ROLE)) + .build(); + clusterState = ClusterStateCreationUtils.state( + node, + node, + node, + new DiscoveryNode[] { node, frozenNode, searchNode, searchAndIndexNode } + ).copyAndUpdate(b -> b.putCustom(HealthMetadata.TYPE, healthMetadata)); + + // Set-up cluster service + clusterService = mock(ClusterService.class); + when(clusterService.getClusterSettings()).thenReturn( + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) + ); + 
when(clusterService.state()).thenReturn(clusterState); + when(clusterService.localNode()).thenReturn(node); + + // Set-up node service with a node with a healthy disk space usage + nodeService = mock(NodeService.class); + + diskHealthTracker = new DiskHealthTracker(nodeService, clusterService); + } + + public void testNoDiskData() { + when( + nodeService.stats( + eq(CommonStatsFlags.NONE), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(true), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false) + ) + ).thenReturn(nodeStats()); + DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + assertEquals(new DiskHealthInfo(HealthStatus.UNKNOWN, DiskHealthInfo.Cause.NODE_HAS_NO_DISK_STATS), diskHealth); + } + + public void testGreenDiskStatus() { + simulateHealthDiskSpace(); + DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + assertEquals(GREEN, diskHealth); + } + + public void testYellowDiskStatus() { + initializeIncreasedDiskSpaceUsage(); + DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + assertEquals(new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD), diskHealth); + } + + public void testRedDiskStatus() { + simulateDiskOutOfSpace(); + DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + assertEquals(new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.NODE_OVER_THE_FLOOD_STAGE_THRESHOLD), diskHealth); + } + + public void testFrozenGreenDiskStatus() { + simulateHealthDiskSpace(); + ClusterState clusterStateFrozenLocalNode = clusterState.copyAndUpdate( + b -> b.nodes(DiscoveryNodes.builder().add(node).add(frozenNode).localNodeId(frozenNode.getId()).build()) + ); + when(clusterService.state()).thenReturn(clusterStateFrozenLocalNode); + DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + assertEquals(GREEN, diskHealth); + } + + public void testFrozenRedDiskStatus() { + simulateDiskOutOfSpace(); + ClusterState clusterStateFrozenLocalNode = clusterState.copyAndUpdate( + b -> b.nodes(DiscoveryNodes.builder().add(node).add(frozenNode).localNodeId(frozenNode.getId()).build()) + ); + when(clusterService.state()).thenReturn(clusterStateFrozenLocalNode); + DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + assertEquals(new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.FROZEN_NODE_OVER_FLOOD_STAGE_THRESHOLD), diskHealth); + } + + public void testSearchNodeGreenDiskStatus() { + // Search-only nodes behave like frozen nodes -- they are RED at 95% full, GREEN otherwise. + initializeIncreasedDiskSpaceUsage(); + ClusterState clusterStateSearchLocalNode = clusterState.copyAndUpdate( + b -> b.nodes(DiscoveryNodes.builder().add(node).add(searchNode).localNodeId(searchNode.getId()).build()) + ); + when(clusterService.state()).thenReturn(clusterStateSearchLocalNode); + DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + assertEquals(GREEN, diskHealth); + } + + public void testSearchNodeRedDiskStatus() { + // Search-only nodes behave like frozen nodes -- they are RED at 95% full, GREEN otherwise. 
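+        // The assertion below therefore expects the frozen-node flood-stage cause for a search-only node.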
+ simulateDiskOutOfSpace(); + ClusterState clusterStateSearchLocalNode = clusterState.copyAndUpdate( + b -> b.nodes(DiscoveryNodes.builder().add(node).add(searchNode).localNodeId(searchNode.getId()).build()) + ); + when(clusterService.state()).thenReturn(clusterStateSearchLocalNode); + DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + assertEquals(new DiskHealthInfo(HealthStatus.RED, DiskHealthInfo.Cause.FROZEN_NODE_OVER_FLOOD_STAGE_THRESHOLD), diskHealth); + } + + public void testSearchAndIndexNodesYellowDiskStatus() { + // A search role mixed with another data node role behaves like an ordinary data node -- YELLOW at 90% full. + initializeIncreasedDiskSpaceUsage(); + ClusterState clusterStateSearchLocalNode = clusterState.copyAndUpdate( + b -> b.nodes(DiscoveryNodes.builder().add(node).add(searchAndIndexNode).localNodeId(searchAndIndexNode.getId()).build()) + ); + when(clusterService.state()).thenReturn(clusterStateSearchLocalNode); + DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + assertEquals(new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD), diskHealth); + } + + public void testYellowStatusForNonDataNode() { + DiscoveryNode dedicatedMasterNode = DiscoveryNodeUtils.builder("master-node-1") + .name("master-node") + .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE)) + .build(); + clusterState = ClusterStateCreationUtils.state( + dedicatedMasterNode, + dedicatedMasterNode, + node, + new DiscoveryNode[] { node, dedicatedMasterNode } + ).copyAndUpdate(b -> b.putCustom(HealthMetadata.TYPE, healthMetadata)); + + initializeIncreasedDiskSpaceUsage(); + DiskHealthInfo diskHealth = diskHealthTracker.checkCurrentHealth(); + assertEquals(new DiskHealthInfo(HealthStatus.YELLOW, DiskHealthInfo.Cause.NODE_OVER_HIGH_THRESHOLD), diskHealth); + } + + public void testHasRelocatingShards() { + String indexName = "my-index"; + final ClusterState state = state(indexName, true, ShardRoutingState.RELOCATING); + // local node coincides with the node hosting the (relocating) primary shard + DiscoveryNode localNode = state.nodes().getLocalNode(); + assertTrue(DiskHealthTracker.hasRelocatingShards(state, localNode)); + + DiscoveryNode dedicatedMasterNode = DiscoveryNodeUtils.builder("master-node-1") + .name("master-node") + .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE)) + .build(); + ClusterState newState = ClusterState.builder(state) + .nodes(new DiscoveryNodes.Builder(state.nodes()).add(dedicatedMasterNode)) + .build(); + assertFalse(DiskHealthTracker.hasRelocatingShards(newState, dedicatedMasterNode)); + } + + private void simulateDiskOutOfSpace() { + when( + nodeService.stats( + eq(CommonStatsFlags.NONE), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(true), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false) + ) + ).thenReturn(nodeStats(1000, 10)); + } + + private void initializeIncreasedDiskSpaceUsage() { + when( + nodeService.stats( + eq(CommonStatsFlags.NONE), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(true), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false) + ) + ).thenReturn(nodeStats(1000, 80)); + } + + private void simulateHealthDiskSpace() { + when( + nodeService.stats( + eq(CommonStatsFlags.NONE), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(true), + eq(false), + eq(false), + eq(false), + eq(false), + 
eq(false), + eq(false), + eq(false), + eq(false), + eq(false), + eq(false) + ) + ).thenReturn(nodeStats(1000, 110)); + } + + private NodeStats nodeStats(long total, long available) { + final FsInfo fs = new FsInfo(-1, null, new FsInfo.Path[] { new FsInfo.Path(null, null, total, 10, available) }); + return nodeStats(fs); + } + + private NodeStats nodeStats() { + return nodeStats(null); + } + + private NodeStats nodeStats(FsInfo fs) { + return new NodeStats( + node, // ignored + randomMillisUpToYear9999(), + null, + null, + null, + null, + null, + fs, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null + ); + } +} diff --git a/server/src/test/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTrackerTests.java b/server/src/test/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTrackerTests.java new file mode 100644 index 0000000000000..0b5f09acc69ca --- /dev/null +++ b/server/src/test/java/org/elasticsearch/health/node/tracker/RepositoriesHealthTrackerTests.java @@ -0,0 +1,112 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.health.node.tracker; + +import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.health.node.RepositoriesHealthInfo; +import org.elasticsearch.health.node.UpdateHealthInfoCacheAction; +import org.elasticsearch.repositories.InvalidRepository; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.RepositoryException; +import org.elasticsearch.repositories.UnknownTypeRepository; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.List; +import java.util.Map; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class RepositoriesHealthTrackerTests extends ESTestCase { + + private RepositoriesHealthTracker repositoriesHealthTracker; + + private RepositoriesService repositoriesService; + + @Before + public void setUp() throws Exception { + super.setUp(); + + repositoriesService = mock(RepositoriesService.class); + + repositoriesHealthTracker = new RepositoriesHealthTracker(repositoriesService); + } + + public void testGetHealthNoRepos() { + when(repositoriesService.getRepositories()).thenReturn(Map.of()); + + var health = repositoriesHealthTracker.checkCurrentHealth(); + + assertTrue(health.unknownRepositories().isEmpty()); + assertTrue(health.invalidRepositories().isEmpty()); + } + + public void testGetHealthCorrectRepo() { + var metadata = mock(RepositoryMetadata.class); + // generation should be != RepositoryData.UNKNOWN_REPO_GEN which is equal to -2. 
+ when(metadata.generation()).thenReturn(randomNonNegativeLong()); + var repo = mock(Repository.class); + when(repo.getMetadata()).thenReturn(metadata); + when(repositoriesService.getRepositories()).thenReturn(Map.of(randomAlphaOfLength(10), repo)); + + var health = repositoriesHealthTracker.checkCurrentHealth(); + + assertTrue(health.unknownRepositories().isEmpty()); + assertTrue(health.invalidRepositories().isEmpty()); + } + + public void testGetHealthUnknownType() { + var repo = createRepositoryMetadata(); + when(repositoriesService.getRepositories()).thenReturn(Map.of(randomAlphaOfLength(10), new UnknownTypeRepository(repo))); + + var health = repositoriesHealthTracker.checkCurrentHealth(); + + assertEquals(1, health.unknownRepositories().size()); + assertEquals(repo.name(), health.unknownRepositories().get(0)); + assertTrue(health.invalidRepositories().isEmpty()); + } + + public void testGetHealthInvalid() { + var repo = createRepositoryMetadata(); + when(repositoriesService.getRepositories()).thenReturn( + Map.of(repo.name(), new InvalidRepository(repo, new RepositoryException(repo.name(), "Test"))) + ); + + var health = repositoriesHealthTracker.checkCurrentHealth(); + + assertTrue(health.unknownRepositories().isEmpty()); + assertEquals(1, health.invalidRepositories().size()); + assertEquals(repo.name(), health.invalidRepositories().get(0)); + } + + public void testSetBuilder() { + var builder = mock(UpdateHealthInfoCacheAction.Request.Builder.class); + var health = new RepositoriesHealthInfo(List.of(), List.of()); + + repositoriesHealthTracker.addToRequestBuilder(builder, health); + + verify(builder).repositoriesHealthInfo(health); + } + + private static RepositoryMetadata createRepositoryMetadata() { + var generation = randomNonNegativeLong() / 2L; + return new RepositoryMetadata( + randomAlphaOfLength(10), + randomAlphaOfLength(10), + randomAlphaOfLength(10), + Settings.EMPTY, + generation, + generation + randomLongBetween(0, generation) + ); + } +} diff --git a/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java index 35713eec56f5a..0dfe27ee6dc50 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java @@ -13,48 +13,81 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.RepositoriesMetadata; import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.health.Diagnosis; import org.elasticsearch.health.Diagnosis.Resource.Type; import org.elasticsearch.health.HealthIndicatorDetails; -import org.elasticsearch.health.HealthIndicatorImpact; import org.elasticsearch.health.HealthIndicatorResult; -import org.elasticsearch.health.ImpactArea; import org.elasticsearch.health.SimpleHealthIndicatorDetails; import org.elasticsearch.health.node.HealthInfo; +import org.elasticsearch.health.node.RepositoriesHealthInfo; import org.elasticsearch.test.ESTestCase; +import org.junit.Before; +import java.util.ArrayList; import java.util.Collections; 
+import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Stream; +import static org.elasticsearch.cluster.node.DiscoveryNode.DISCOVERY_NODE_COMPARATOR; import static org.elasticsearch.common.util.CollectionUtils.appendToCopy; import static org.elasticsearch.health.HealthStatus.GREEN; -import static org.elasticsearch.health.HealthStatus.RED; +import static org.elasticsearch.health.HealthStatus.UNKNOWN; +import static org.elasticsearch.health.HealthStatus.YELLOW; import static org.elasticsearch.repositories.RepositoryData.CORRUPTED_REPO_GEN; import static org.elasticsearch.repositories.RepositoryData.EMPTY_REPO_GEN; -import static org.elasticsearch.snapshots.RepositoryIntegrityHealthIndicatorService.CORRUPTED_REPOSITORY; +import static org.elasticsearch.snapshots.RepositoryIntegrityHealthIndicatorService.CORRUPTED_DEFINITION; +import static org.elasticsearch.snapshots.RepositoryIntegrityHealthIndicatorService.INVALID_DEFINITION; import static org.elasticsearch.snapshots.RepositoryIntegrityHealthIndicatorService.NAME; +import static org.elasticsearch.snapshots.RepositoryIntegrityHealthIndicatorService.UNKNOWN_DEFINITION; import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class RepositoryIntegrityHealthIndicatorServiceTests extends ESTestCase { - public void testIsGreenWhenAllRepositoriesAreNotCorrupted() { + private DiscoveryNode node1; + private DiscoveryNode node2; + private HealthInfo healthInfo; + + @Before + public void setUp() throws Exception { + super.setUp(); + + node1 = DiscoveryNodeUtils.create(randomAlphaOfLength(10), randomUUID()); + node2 = DiscoveryNodeUtils.create(randomAlphaOfLength(10), randomUUID()); + healthInfo = new HealthInfo( + Map.of(), + null, + new HashMap<>( + Map.of( + node1.getId(), + new RepositoriesHealthInfo(List.of(), List.of()), + node2.getId(), + new RepositoriesHealthInfo(List.of(), List.of()) + ) + ) + ); + } + + public void testIsGreenWhenAllRepositoriesAreHealthy() { var repos = randomList(1, 10, () -> createRepositoryMetadata("healthy-repo", false)); var clusterState = createClusterStateWith(new RepositoriesMetadata(repos)); - var service = createRepositoryCorruptionHealthIndicatorService(clusterState); + var service = createRepositoryIntegrityHealthIndicatorService(clusterState); assertThat( - service.calculate(true, HealthInfo.EMPTY_HEALTH_INFO), + service.calculate(true, healthInfo), equalTo( new HealthIndicatorResult( NAME, GREEN, - RepositoryIntegrityHealthIndicatorService.NO_CORRUPT_REPOS, + RepositoryIntegrityHealthIndicatorService.ALL_REPOS_HEALTHY, new SimpleHealthIndicatorDetails(Map.of("total_repositories", repos.size())), Collections.emptyList(), Collections.emptyList() @@ -63,35 +96,127 @@ public void testIsGreenWhenAllRepositoriesAreNotCorrupted() { ); } - public void testIsRedWhenAtLeastOneRepoIsCorrupted() { + public void testIsYellowWhenAtLeastOneRepoIsCorrupted() { var repos = appendToCopy( randomList(1, 10, () -> createRepositoryMetadata("healthy-repo", false)), createRepositoryMetadata("corrupted-repo", true) ); var clusterState = createClusterStateWith(new RepositoriesMetadata(repos)); - var service = createRepositoryCorruptionHealthIndicatorService(clusterState); + var service = createRepositoryIntegrityHealthIndicatorService(clusterState); List<String> corruptedRepos = List.of("corrupted-repo"); assertThat( - service.calculate(true, HealthInfo.EMPTY_HEALTH_INFO), + service.calculate(true, healthInfo), equalTo( new
HealthIndicatorResult( NAME, - RED, - "Detected [1] corrupted snapshot repositories: [corrupted-repo].", - new SimpleHealthIndicatorDetails( - Map.of("total_repositories", repos.size(), "corrupted_repositories", 1, "corrupted", corruptedRepos) - ), - Collections.singletonList( - new HealthIndicatorImpact( - NAME, - RepositoryIntegrityHealthIndicatorService.REPOSITORY_CORRUPTED_IMPACT_ID, - 1, - "Data in corrupted snapshot repository [corrupted-repo] may be lost and cannot be restored.", - List.of(ImpactArea.BACKUP) + YELLOW, + "Detected [1] corrupted snapshot repository.", + createDetails(repos.size(), 1, corruptedRepos, 0, 0), + RepositoryIntegrityHealthIndicatorService.IMPACTS, + List.of(new Diagnosis(CORRUPTED_DEFINITION, List.of(new Diagnosis.Resource(Type.SNAPSHOT_REPOSITORY, corruptedRepos)))) + ) + ) + ); + } + + public void testIsYellowWhenAtLeastOneRepoIsUnknown() { + var repos = randomList(1, 10, () -> createRepositoryMetadata("healthy-repo", false)); + repos.add(createRepositoryMetadata("unknown-repo", false)); + var clusterState = createClusterStateWith(new RepositoriesMetadata(repos)); + var service = createRepositoryIntegrityHealthIndicatorService(clusterState); + healthInfo.repositoriesInfoByNode().put(node1.getId(), new RepositoriesHealthInfo(List.of("unknown-repo"), List.of())); + + assertThat( + service.calculate(true, healthInfo), + equalTo( + new HealthIndicatorResult( + NAME, + YELLOW, + "Detected [1] unknown snapshot repository.", + createDetails(repos.size(), 0, List.of(), 1, 0), + RepositoryIntegrityHealthIndicatorService.IMPACTS, + List.of( + new Diagnosis( + UNKNOWN_DEFINITION, + List.of( + new Diagnosis.Resource(Type.SNAPSHOT_REPOSITORY, List.of("unknown-repo")), + new Diagnosis.Resource(List.of(node1)) + ) ) - ), - List.of(new Diagnosis(CORRUPTED_REPOSITORY, List.of(new Diagnosis.Resource(Type.SNAPSHOT_REPOSITORY, corruptedRepos)))) + ) + ) + ) + ); + } + + public void testIsYellowWhenAtLeastOneRepoIsInvalid() { + var repos = randomList(1, 10, () -> createRepositoryMetadata("healthy-repo", false)); + repos.add(createRepositoryMetadata("invalid-repo", false)); + var clusterState = createClusterStateWith(new RepositoriesMetadata(repos)); + var service = createRepositoryIntegrityHealthIndicatorService(clusterState); + healthInfo.repositoriesInfoByNode().put(node1.getId(), new RepositoriesHealthInfo(List.of(), List.of("invalid-repo"))); + + assertThat( + service.calculate(true, healthInfo), + equalTo( + new HealthIndicatorResult( + NAME, + YELLOW, + "Detected [1] invalid snapshot repository.", + createDetails(repos.size(), 0, List.of(), 0, 1), + RepositoryIntegrityHealthIndicatorService.IMPACTS, + List.of( + new Diagnosis( + INVALID_DEFINITION, + List.of( + new Diagnosis.Resource(Type.SNAPSHOT_REPOSITORY, List.of("invalid-repo")), + new Diagnosis.Resource(List.of(node1)) + ) + ) + ) + ) + ) + ); + } + + public void testIsYellowWhenEachRepoTypeIsPresent() { + var repos = randomList(1, 10, () -> createRepositoryMetadata("healthy-repo", false)); + repos.add(createRepositoryMetadata("corrupted-repo", true)); + repos.add(createRepositoryMetadata("unknown-repo", false)); + repos.add(createRepositoryMetadata("invalid-repo", false)); + var clusterState = createClusterStateWith(new RepositoriesMetadata(repos)); + var service = createRepositoryIntegrityHealthIndicatorService(clusterState); + healthInfo.repositoriesInfoByNode().put(node1.getId(), new RepositoriesHealthInfo(List.of("unknown-repo"), List.of())); + healthInfo.repositoriesInfoByNode().put(node2.getId(), new 
RepositoriesHealthInfo(List.of(), List.of("invalid-repo"))); + + var corrupted = List.of("corrupted-repo"); + assertThat( + service.calculate(true, healthInfo), + equalTo( + new HealthIndicatorResult( + NAME, + YELLOW, + "Detected [1] corrupted snapshot repository, and [1] unknown snapshot repository, and [1] invalid snapshot repository.", + createDetails(repos.size(), 1, corrupted, 1, 1), + RepositoryIntegrityHealthIndicatorService.IMPACTS, + List.of( + new Diagnosis(CORRUPTED_DEFINITION, List.of(new Diagnosis.Resource(Type.SNAPSHOT_REPOSITORY, corrupted))), + new Diagnosis( + UNKNOWN_DEFINITION, + List.of( + new Diagnosis.Resource(Type.SNAPSHOT_REPOSITORY, List.of("unknown-repo")), + new Diagnosis.Resource(List.of(node1)) + ) + ), + new Diagnosis( + INVALID_DEFINITION, + List.of( + new Diagnosis.Resource(Type.SNAPSHOT_REPOSITORY, List.of("invalid-repo")), + new Diagnosis.Resource(List.of(node2)) + ) + ) + ) ) ) ); @@ -99,10 +224,10 @@ public void testIsRedWhenAtLeastOneRepoIsCorrupted() { public void testIsGreenWhenNoMetadata() { var clusterState = createClusterStateWith(null); - var service = createRepositoryCorruptionHealthIndicatorService(clusterState); + var service = createRepositoryIntegrityHealthIndicatorService(clusterState); assertThat( - service.calculate(false, HealthInfo.EMPTY_HEALTH_INFO), + service.calculate(false, healthInfo), equalTo( new HealthIndicatorResult( NAME, @@ -116,57 +241,119 @@ public void testIsGreenWhenNoMetadata() { ); } - // We expose the indicator name and the diagnoses in the x-pack usage API. In order to index them properly in a telemetry index - // they need to be declared in the health-api-indexer.edn in the telemetry repository. - public void testMappedFieldsForTelemetry() { - assertThat(RepositoryIntegrityHealthIndicatorService.NAME, equalTo("repository_integrity")); + public void testIsUnknownWhenNoHealthInfoIsAvailable() { + var repos = randomList(1, 10, () -> createRepositoryMetadata("healthy-repo", false)); + var clusterState = createClusterStateWith(new RepositoriesMetadata(repos)); + var service = createRepositoryIntegrityHealthIndicatorService(clusterState); + assertThat( - CORRUPTED_REPOSITORY.getUniqueId(), - equalTo("elasticsearch:health:repository_integrity:diagnosis:corrupt_repo_integrity") + service.calculate(true, new HealthInfo(Map.of(), null, Map.of())), + equalTo( + new HealthIndicatorResult( + NAME, + UNKNOWN, + RepositoryIntegrityHealthIndicatorService.NO_REPO_HEALTH_INFO, + new SimpleHealthIndicatorDetails( + Map.of("total_repositories", repos.size(), "corrupted_repositories", 0, "corrupted", List.of()) + ), + Collections.emptyList(), + Collections.emptyList() + ) + ) ); } public void testLimitNumberOfAffectedResources() { - List<RepositoryMetadata> repos = Stream.iterate(0, n -> n + 1) - .limit(20) - .map(i -> createRepositoryMetadata("corrupted-repo" + i, true)) - .toList(); - var clusterState = createClusterStateWith(new RepositoriesMetadata(repos)); - var service = createRepositoryCorruptionHealthIndicatorService(clusterState); + var ids = Stream.iterate(0, n -> n + 1).limit(20).toList(); + List<RepositoryMetadata> repos = ids.stream().map(i -> createRepositoryMetadata("corrupted-repo" + i, true)).toList(); + // Create nodes + var discoveryNodesBuilder = DiscoveryNodes.builder(); + ids.forEach(i -> discoveryNodesBuilder.add(DiscoveryNodeUtils.create(randomAlphaOfLength(10), "node-" + i))); + var nodes = discoveryNodesBuilder.build(); + // Create state & service + var clusterState = ClusterState.builder(createClusterStateWith(new
RepositoriesMetadata(repos))).nodes(nodes).build(); + var service = createRepositoryIntegrityHealthIndicatorService(clusterState); + // Create repos + final List<String> unknownRepos = new ArrayList<>(); + final List<String> invalidRepos = new ArrayList<>(); + Map<String, RepositoriesHealthInfo> repoHealthInfo = new HashMap<>(); + ids.forEach(i -> { + unknownRepos.add("unknown-repo-" + i); + invalidRepos.add("invalid-repo-" + i); + repoHealthInfo.put("node-" + i, new RepositoriesHealthInfo(List.of("unknown-repo-" + i), List.of("invalid-repo-" + i))); + }); + healthInfo = new HealthInfo(healthInfo.diskInfoByNode(), healthInfo.dslHealthInfo(), repoHealthInfo); - { - assertThat( - service.calculate(true, 10, HealthInfo.EMPTY_HEALTH_INFO).diagnosisList(), - equalTo( - List.of( - new Diagnosis( - CORRUPTED_REPOSITORY, - List.of( - new Diagnosis.Resource( - Type.SNAPSHOT_REPOSITORY, - repos.stream().limit(10).map(RepositoryMetadata::name).toList() - ) - ) - ) + assertThat( + service.calculate(true, 10, healthInfo).diagnosisList(), + equalTo(createDiagnoses(repos, nodes, unknownRepos, invalidRepos, 10)) + ); + + assertThat( + service.calculate(true, 0, healthInfo).diagnosisList(), + equalTo(createDiagnoses(repos, nodes, unknownRepos, invalidRepos, 0)) + ); + } + + private List<Diagnosis> createDiagnoses( + List<RepositoryMetadata> repos, + DiscoveryNodes nodes, + List<String> unknownRepos, + List<String> invalidRepos, + int maxAffectedResourcesCount + ) { + return List.of( + new Diagnosis( + CORRUPTED_DEFINITION, + List.of( + new Diagnosis.Resource( + Type.SNAPSHOT_REPOSITORY, + repos.stream().map(RepositoryMetadata::name).sorted().limit(maxAffectedResourcesCount).toList() + ) + ) - ); - } - - { - assertThat( - service.calculate(true, 0, HealthInfo.EMPTY_HEALTH_INFO).diagnosisList(), - equalTo(List.of(new Diagnosis(CORRUPTED_REPOSITORY, List.of(new Diagnosis.Resource(Type.SNAPSHOT_REPOSITORY, List.of()))))) - ); - } + ), + new Diagnosis( + UNKNOWN_DEFINITION, + List.of( + new Diagnosis.Resource( + Type.SNAPSHOT_REPOSITORY, + unknownRepos.stream().sorted().limit(maxAffectedResourcesCount).toList() + ), + new Diagnosis.Resource( + nodes.getAllNodes().stream().sorted(DISCOVERY_NODE_COMPARATOR).limit(maxAffectedResourcesCount).toList() + ) + ) + ), + new Diagnosis( + INVALID_DEFINITION, + List.of( + new Diagnosis.Resource( + Type.SNAPSHOT_REPOSITORY, + invalidRepos.stream().sorted().limit(maxAffectedResourcesCount).toList() + ), + new Diagnosis.Resource( + nodes.getAllNodes().stream().sorted(DISCOVERY_NODE_COMPARATOR).limit(maxAffectedResourcesCount).toList() + ) + ) + ) + ); + } + // We expose the indicator name and the diagnoses in the x-pack usage API. In order to index them properly in a telemetry index + // they need to be declared in the health-api-indexer.edn in the telemetry repository.
+ public void testMappedFieldsForTelemetry() { + assertEquals("repository_integrity", RepositoryIntegrityHealthIndicatorService.NAME); + assertEquals("elasticsearch:health:repository_integrity:diagnosis:corrupt_repo_integrity", CORRUPTED_DEFINITION.getUniqueId()); + assertEquals("elasticsearch:health:repository_integrity:diagnosis:unknown_repository", UNKNOWN_DEFINITION.getUniqueId()); + assertEquals("elasticsearch:health:repository_integrity:diagnosis:invalid_repository", INVALID_DEFINITION.getUniqueId()); } - private static ClusterState createClusterStateWith(RepositoriesMetadata metadata) { + private ClusterState createClusterStateWith(RepositoriesMetadata metadata) { var builder = ClusterState.builder(new ClusterName("test-cluster")); if (metadata != null) { builder.metadata(Metadata.builder().putCustom(RepositoriesMetadata.TYPE, metadata)); } + builder.nodes(DiscoveryNodes.builder().add(node1).add(node2).build()); return builder.build(); } @@ -174,9 +361,26 @@ private static RepositoryMetadata createRepositoryMetadata(String name, boolean return new RepositoryMetadata(name, "uuid", "s3", Settings.EMPTY, corrupted ? CORRUPTED_REPO_GEN : EMPTY_REPO_GEN, EMPTY_REPO_GEN); } - private static RepositoryIntegrityHealthIndicatorService createRepositoryCorruptionHealthIndicatorService(ClusterState clusterState) { + private static RepositoryIntegrityHealthIndicatorService createRepositoryIntegrityHealthIndicatorService(ClusterState clusterState) { var clusterService = mock(ClusterService.class); when(clusterService.state()).thenReturn(clusterState); return new RepositoryIntegrityHealthIndicatorService(clusterService); } + + private SimpleHealthIndicatorDetails createDetails(int total, int corruptedCount, List<String> corrupted, int unknown, int invalid) { + return new SimpleHealthIndicatorDetails( + Map.of( + "total_repositories", + total, + "corrupted_repositories", + corruptedCount, + "corrupted", + corrupted, + "unknown_repositories", + unknown, + "invalid_repositories", + invalid + ) + ); + } } From cc67205c251b9579f7fbc84aa8e6d4c14f015d33 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 7 Feb 2024 15:31:26 +0100 Subject: [PATCH 106/106] Assign index.downsample.interval setting when the downsample index gets created. (#105241) This avoids keeping the downsamplingInterval field around. Additionally, the downsample interval is already known when the downsample action is invoked, and it doesn't change.
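In essence (a sketch of the write path; the exact code is in the diff below, where `settingsBuilder` stands in for the Settings.Builder used there): the interval is derived from the request at index-creation time and stamped onto the new index's settings, so no field on the action has to remember it:

    // Sketch: derive the interval from the request right where the index is
    // created, instead of caching it in a member field in masterOperation().
    var downsampleInterval = request.getDownsampleConfig().getInterval().toString();
    settingsBuilder.put(IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.getKey(), downsampleInterval);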
--- .../org/elasticsearch/cluster/metadata/IndexMetadata.java | 3 +-- .../xpack/downsample/TransportDownsampleAction.java | 7 +++---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 83b1c48e69eb9..2730cc8909790 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -1276,8 +1276,7 @@ public String toString() { INDEX_DOWNSAMPLE_INTERVAL_KEY, "", Property.IndexScope, - Property.InternalIndex, - Property.PrivateIndex + Property.InternalIndex ); // LIFECYCLE_NAME is here an as optimization, see LifecycleSettings.LIFECYCLE_NAME and diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index b761fcab1b6db..5fd3778942f5b 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -115,7 +115,6 @@ public class TransportDownsampleAction extends AcknowledgedTransportMasterNodeAc private final IndexScopedSettings indexScopedSettings; private final ThreadContext threadContext; private final PersistentTasksService persistentTasksService; - private String downsamplingInterval; private static final Set<String> FORBIDDEN_SETTINGS = Set.of( IndexSettings.DEFAULT_PIPELINE.getKey(), @@ -184,7 +183,6 @@ protected void masterOperation( ActionListener<AcknowledgedResponse> listener ) { String sourceIndexName = request.getSourceIndex(); - downsamplingInterval = request.getDownsampleConfig().getInterval().toString(); final IndicesAccessControl indicesAccessControl = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); if (indicesAccessControl != null) { @@ -775,12 +773,14 @@ private void createDownsampleIndex( * case downsample will fail. */ int numberOfReplicas = settings.getAsInt(Downsample.DOWNSAMPLE_MIN_NUMBER_OF_REPLICAS_NAME, 0); + var downsampleInterval = request.getDownsampleConfig().getInterval().toString(); Settings.Builder builder = Settings.builder() .put(IndexMetadata.SETTING_INDEX_HIDDEN, true) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, sourceIndexMetadata.getNumberOfShards()) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, String.valueOf(numberOfReplicas)) .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "-1") - .put(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.getKey(), DownsampleTaskStatus.STARTED); + .put(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.getKey(), DownsampleTaskStatus.STARTED) + .put(IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.getKey(), downsampleInterval); if (sourceIndexMetadata.getSettings().hasValue(MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey())) { builder.put( MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), @@ -910,7 +910,6 @@ public ClusterState execute(ClusterState currentState) { Settings.builder() .put(downsampleIndex.getSettings()) .put(IndexMetadata.INDEX_DOWNSAMPLE_STATUS.getKey(), DownsampleTaskStatus.SUCCESS) - .put(IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.getKey(), downsamplingInterval) .build(), downsampleIndexName );
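Follow-up note on the last hunk: since the interval is now written when the downsample index is created, the SUCCESS-time settings update only needs to flip the status, and the transport action no longer carries per-request state. A rough verification sketch (the index name, interval value, and `clusterState` variable are assumed for illustration, not taken from this patch):

    // Hypothetical assertion: the interval setting is readable as soon as the
    // downsample index exists, well before the task reaches SUCCESS.
    IndexMetadata downsampled = clusterState.metadata().index("downsample-1h-my-index");
    assertEquals("1h", IndexMetadata.INDEX_DOWNSAMPLE_INTERVAL.get(downsampled.getSettings()));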